/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
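
/* As an illustrative example (editorial addition, not part of the original
   sources), in

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   va_arg is only ever used with int, so a target that would normally
   spill both general purpose and floating point argument registers in
   the prologue only needs to spill the general purpose ones here.  */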
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
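/* Illustrative note (editorial, assuming a typical CFG shape): if the
   va_start sits in the entry block and the va_arg sits in the body of a
   later loop, the loop body is dominated by the entry block but can run
   many times per va_start, so this predicate must return false; with the
   va_arg in straight-line code after the va_start it returns true.  */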
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
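/* E.g. (an illustrative editorial sketch, assuming the SysV x86-64
   va_list layout where gp_offset advances in 8-byte units) for the
   GIMPLE sequence

     tmp_1 = ap.gp_offset;
     tmp_2 = tmp_1 + 8;
     ap.gp_offset = tmp_2;

   calling this function with COUNTER == ap.gp_offset and RHS == tmp_2
   would return 8.  */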
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}
/* Called by walk_tree to look for references to va_list variables.  */
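/* Note (editorial): the bitmap passed in via DATA (si->va_list_vars) holds
   two kinds of entries in one index space: SSA name versions directly, and
   VAR_DECLs offset by num_ssa_names, which is why the DECL_UID test below
   adds num_ssa_names.  */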
static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   an FPR counter.  */
static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
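/* Illustrative context (editorial, assuming a simple char * va_list ABI):
   the gimplified va_arg usually expands to

     tem_1 = ap;
     tem_2 = tem_1 + CST;
     ap = tem_2;

   and this function recognizes the first of those statements.  */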
static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function local temporary.  */
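/* E.g. (illustrative, with hypothetical SSA names): with tem_1 already in
   si->va_list_escape_vars,

     tem_2 = tem_1 + 8;   <-- tem_2 is added to si->va_list_escape_vars
     global_p = tem_2;    <-- LHS is not an SSA_NAME, so si->va_list_escapes
                              is set  */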
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
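/* Illustrative escape through a PHI (editorial, hypothetical SSA names): in

     # p_3 = PHI <tem_1(3), q_2(4)>

   if tem_1 is in si->va_list_escape_vars but p_3 is not, the value may flow
   somewhere this pass no longer tracks, so it is treated as an escape
   below.  */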
static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gimple phi = gsi_stmt (i);

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}
/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}
/* Entry point to the stdarg optimization pass.  */
static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
              gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);

              if (virtual_operand_p (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_optimize_stdarg (); }
  unsigned int execute () { return execute_optimize_stdarg (); }

}; // class pass_stdarg

} // anon namespace
gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}