1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "langhooks.h"
28 #include "diagnostic.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-stdarg.h"
34 /* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
43 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
44 is executed at most as many times as VA_START_BB. */
/* Implementation: an iterative reverse-CFG walk from VA_ARG_BB using an
   explicit edge work-stack (VEC of edges) and an sbitmap of visited block
   indices, instead of recursion.
   NOTE(review): this extraction is missing several original lines (48,
   50-54, 56-57, 59-60, ...); the return statements and some loop bodies
   are not visible here — do not treat the fragment below as complete.  */
47 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Work-stack of CFG edges still to be examined.  */
49 VEC (edge
, heap
) *stack
= NULL
;
/* Trivial case: the va_arg and the va_start live in the same block.  */
55 if (va_arg_bb
== va_start_bb
)
/* If va_start's block does not dominate va_arg's block, va_arg can be
   reached without executing va_start first.  */
58 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
/* Visited set, indexed by basic-block index.  */
61 visited
= sbitmap_alloc (last_basic_block
);
62 sbitmap_zero (visited
);
/* Seed the work-stack with all predecessor edges of va_arg_bb.  */
65 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
66 VEC_safe_push (edge
, heap
, stack
, e
);
/* Process edges until the backward reachability frontier is exhausted.  */
68 while (! VEC_empty (edge
, stack
))
72 e
= VEC_pop (edge
, stack
);
/* Abnormal/EH edges make the execution count unpredictable.
   NOTE(review): the action taken on this condition is among the
   missing lines.  */
75 if (e
->flags
& EDGE_COMPLEX
)
/* Stop the walk at va_start_bb itself.  */
81 if (src
== va_start_bb
)
84 /* va_arg_bb can be executed more times than va_start_bb. */
/* Since va_start_bb dominates va_arg_bb, the walk can never reach the
   function entry without passing through va_start_bb.  */
91 gcc_assert (src
!= ENTRY_BLOCK_PTR
);
/* Expand each block only once; push its predecessors on first visit.  */
93 if (! TEST_BIT (visited
, src
->index
))
95 SET_BIT (visited
, src
->index
);
96 FOR_EACH_EDGE (e
, ei
, src
->preds
)
97 VEC_safe_push (edge
, heap
, stack
, e
);
/* Release the work-stack and the visited bitmap.  */
101 VEC_free (edge
, heap
, stack
);
102 sbitmap_free (visited
);
107 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
108 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
109 GPR_P is true if this is GPR counter. */
/* Two-phase walk over the SSA def-use chain of RHS: the first loop walks
   defs backwards accumulating the constant bump into RET until COUNTER is
   reached; the second loop repeats the walk caching per-SSA-name offsets
   into si->offsets.  NOTE(review): loop headers, braces and several
   statements are missing from this extraction (orig. lines 113-116, 119,
   121-123, 126-128, 132-133, ...).  */
111 static unsigned HOST_WIDE_INT
112 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
/* RET accumulates the constant increment found so far.  */
117 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
118 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offset cache on first use.
   NOTE(review): the initializer stored into each element is among the
   missing lines (presumably -1, to mean "unknown"; confirm upstream).  */
120 if (si
->offsets
== NULL
)
124 si
->offsets
= XNEWVEC (int, num_ssa_names
);
125 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Select the counter value and its saturation limit for GPRs or FPRs.  */
129 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
130 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
131 orig_lhs
= lhs
= rhs
;
/* --- First walk: follow defs of LHS back toward COUNTER.  --- */
134 enum tree_code rhs_code
;
/* A cached offset means this SSA name was already resolved.  */
136 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
138 if (counter_val
>= max_size
)
/* Convert the cached absolute offset back into a relative bump.  */
144 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
148 stmt
= SSA_NAME_DEF_STMT (lhs
);
/* Anything but a plain assignment defining LHS ends the walk: bump
   cannot be determined.  */
150 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
151 return (unsigned HOST_WIDE_INT
) -1;
153 rhs_code
= gimple_assign_rhs_code (stmt
);
/* Look through copies and casts of another SSA name.  */
154 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
155 || gimple_assign_cast_p (stmt
))
156 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
158 lhs
= gimple_assign_rhs1 (stmt
);
/* LHS = name + constant: accumulate the constant and keep walking.  */
162 if ((rhs_code
== POINTER_PLUS_EXPR
163 || rhs_code
== PLUS_EXPR
)
164 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
165 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
167 ret
+= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
168 lhs
= gimple_assign_rhs1 (stmt
);
/* Any other RHS shape is unanalyzable.  */
172 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
173 return (unsigned HOST_WIDE_INT
) -1;
175 rhs
= gimple_assign_rhs1 (stmt
);
/* The chain must terminate in a load of COUNTER itself.  */
176 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
177 return (unsigned HOST_WIDE_INT
) -1;
/* For structure counters, compare base object and FIELD_DECL.  */
179 if (TREE_CODE (counter
) == COMPONENT_REF
)
181 if (get_base_address (counter
) != get_base_address (rhs
)
182 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
183 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
184 return (unsigned HOST_WIDE_INT
) -1;
186 else if (counter
!= rhs
)
187 return (unsigned HOST_WIDE_INT
) -1;
/* --- Second walk: re-trace the chain, caching absolute offsets.  --- */
193 val
= ret
+ counter_val
;
196 enum tree_code rhs_code
;
/* Stop once a name with an already-cached offset is reached.  */
198 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
/* Clamp cached offsets at MAX_SIZE; otherwise record VAL.
   NOTE(review): the condition selecting between these two stores is
   among the missing lines.  */
202 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
204 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
206 stmt
= SSA_NAME_DEF_STMT (lhs
);
208 rhs_code
= gimple_assign_rhs_code (stmt
);
/* Same copy/cast look-through as in the first walk.  */
209 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
210 || gimple_assign_cast_p (stmt
))
211 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
213 lhs
= gimple_assign_rhs1 (stmt
);
/* Same plus-constant handling, but VAL decreases as the walk moves
   toward the original COUNTER load.  */
217 if ((rhs_code
== POINTER_PLUS_EXPR
218 || rhs_code
== PLUS_EXPR
)
219 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
220 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
222 val
-= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
223 lhs
= gimple_assign_rhs1 (stmt
);
234 /* Called by walk_tree to look for references to va_list variables. */
/* DATA is a struct walk_stmt_info whose 'info' field carries the bitmap of
   DECL_UIDs of tracked va_list variables.  NOTE(review): the function's
   return statements and signature tail are missing from this extraction;
   as a walk_tree callback it presumably returns *TP on a hit and
   NULL_TREE otherwise — confirm upstream.  */
237 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* Recover the va_list-variable bitmap passed through walk_stmt_info.  */
240 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
/* Look through SSA names to the underlying variable declaration.  */
243 if (TREE_CODE (var
) == SSA_NAME
)
244 var
= SSA_NAME_VAR (var
);
/* A hit: VAR is one of the tracked va_list variables.  */
246 if (TREE_CODE (var
) == VAR_DECL
247 && bitmap_bit_p (va_list_vars
, DECL_UID (var
)))
254 /* Helper function of va_list_counter_struct_op. Compute
255 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
256 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
257 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): the tail of this comment and of the parameter list
   (orig. lines 258-263) is missing from this extraction.  */
261 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
264 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not yet decided for this bb": decide now.  */
266 if (si
->compute_sizes
< 0)
268 si
->compute_sizes
= 0;
/* Precise sizes are only computable when there is exactly one va_start
   and the current bb executes at most once per va_start execution.  */
269 if (si
->va_start_count
== 1
270 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
271 si
->compute_sizes
= 1;
/* Optional detailed dump of the reachability decision.  */
273 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
275 "bb%d will %sbe executed at most once for each va_start "
276 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
277 si
->va_start_bb
->index
);
/* The "+ 1 > 1" idiom filters out the (unsigned HOST_WIDE_INT) -1
   failure value of va_list_counter_bump while accepting 0 bumps.
   NOTE(review): the first operand(s) of this && are missing.  */
282 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
/* Accumulate into the GPR counter, guarding against exceeding the
   saturation limit.  */
284 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
286 cfun
->va_list_gpr_size
+= increment
;
/* Same for the FPR counter.  */
290 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
292 cfun
->va_list_fpr_size
+= increment
;
/* Writes to the counter, or blocks where sizes can't be computed,
   force the conservative maximum.
   NOTE(review): which of the two stores below applies to GPR vs. FPR is
   controlled by missing lines (orig. 298-301).  */
297 if (write_p
|| !si
->compute_sizes
)
300 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
302 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
307 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
308 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
309 is false, AP has been seen in VAR = AP assignment.
310 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
311 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): the closing words of this comment and the early-return
   bodies below are missing from this extraction.  */
315 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a field access (counter field of the va_list struct).  */
320 if (TREE_CODE (ap
) != COMPONENT_REF
321 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* VAR must be a local SSA temporary, not itself a tracked va_list.  */
324 if (TREE_CODE (var
) != SSA_NAME
325 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (var
))))
/* The base object of the COMPONENT_REF must be a tracked va_list.  */
328 base
= get_base_address (ap
);
329 if (TREE_CODE (base
) != VAR_DECL
330 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
)))
/* Dispatch to the counter bookkeeping for the GPR or FPR field, as
   identified by the backend-provided counter FIELD_DECLs.  */
333 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
334 va_list_counter_op (si
, ap
, var
, true, write_p
);
335 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
336 va_list_counter_op (si
, ap
, var
, false, write_p
);
342 /* Check for TEM = AP. Return true if found and the caller shouldn't
343 search for va_list references in the statement. */
/* Used only for the simple (void*/char*) va_list representation, where
   the va_list variable itself is the single counter.
   NOTE(review): the return statements are missing from this extraction.  */
346 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be one of the tracked va_list variables.  */
348 if (TREE_CODE (ap
) != VAR_DECL
349 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM must be a function-local SSA temporary that is not itself a
   tracked va_list and is not backed by a global.  */
352 if (TREE_CODE (tem
) != SSA_NAME
353 || bitmap_bit_p (si
->va_list_vars
,
354 DECL_UID (SSA_NAME_VAR (tem
)))
355 || is_global_var (SSA_NAME_VAR (tem
)))
/* Lazily decide whether precise sizes are computable in this bb
   (same logic as in va_list_counter_op).  */
358 if (si
->compute_sizes
< 0)
360 si
->compute_sizes
= 0;
361 if (si
->va_start_count
== 1
362 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
363 si
->compute_sizes
= 1;
365 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
367 "bb%d will %sbe executed at most once for each va_start "
368 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
369 si
->va_start_bb
->index
);
372 /* For void * or char * va_list types, there is just one counter.
373 If va_arg is used in a loop, we don't know how many registers need
/* NOTE(review): the rest of this comment (orig. line 374) is missing.  */
375 if (! si
->compute_sizes
)
/* A failed bump computation means the read is not a recognized
   va_arg-style access.  */
378 if (va_list_counter_bump (si
, ap
, tem
, true) == (unsigned HOST_WIDE_INT
) -1)
381 /* Note the temporary, as we need to track whether it doesn't escape
382 the current function. */
383 bitmap_set_bit (si
->va_list_escape_vars
,
384 DECL_UID (SSA_NAME_VAR (tem
)));
393 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* NOTE(review): the opening lines of this comment (orig. 390-392,
   describing the AP = TEM2 store at the end of a va_arg sequence) are
   missing from this extraction, as are the return statements below.  */
396 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
398 unsigned HOST_WIDE_INT increment
;
/* AP must be one of the tracked va_list variables.  */
400 if (TREE_CODE (ap
) != VAR_DECL
401 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM2 must be a local SSA temporary, not a tracked va_list.  */
404 if (TREE_CODE (tem2
) != SSA_NAME
405 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (tem2
))))
/* Without computable sizes nothing precise can be concluded here.  */
408 if (si
->compute_sizes
<= 0)
/* How far past AP does TEM2 point?  */
411 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
/* "increment + 1 <= 1" catches both 0 and the -1 failure value.  */
412 if (increment
+ 1 <= 1)
/* Grow the GPR size, saturating at VA_LIST_MAX_GPR_SIZE.  */
415 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
416 cfun
->va_list_gpr_size
+= increment
;
418 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
424 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
425 containing value of some va_list variable plus optionally some constant,
426 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
427 depending whether LHS is a function local temporary. */
/* NOTE(review): several early-return bodies are missing from this
   extraction (orig. lines 433-434, 438-439, ...).  */
430 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-valued assignments can propagate a va_list value.  */
432 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* RHS must be one of the temporaries already known to carry a
   va_list-derived pointer.  */
435 if (TREE_CODE (rhs
) != SSA_NAME
436 || ! bitmap_bit_p (si
->va_list_escape_vars
,
437 DECL_UID (SSA_NAME_VAR (rhs
))))
/* Storing into a non-SSA or global destination means the va_list value
   leaves our tracking: a hard escape.  */
440 if (TREE_CODE (lhs
) != SSA_NAME
|| is_global_var (SSA_NAME_VAR (lhs
)))
442 si
->va_list_escapes
= true;
/* Lazily decide whether precise sizes are computable in this bb
   (same logic as in va_list_counter_op / va_list_ptr_read).  */
446 if (si
->compute_sizes
< 0)
448 si
->compute_sizes
= 0;
449 if (si
->va_start_count
== 1
450 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
451 si
->compute_sizes
= 1;
453 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
455 "bb%d will %sbe executed at most once for each va_start "
456 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
457 si
->va_start_bb
->index
);
460 /* For void * or char * va_list types, there is just one counter.
461 If va_arg is used in a loop, we don't know how many registers need
/* NOTE(review): the rest of this comment (orig. line 462) is missing.  */
463 if (! si
->compute_sizes
)
465 si
->va_list_escapes
= true;
/* If the offset of LHS from the va_start pointer cannot be computed,
   treat the value as escaped.  */
469 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
470 == (unsigned HOST_WIDE_INT
) -1)
472 si
->va_list_escapes
= true;
/* LHS is another local temporary carrying a va_list-derived pointer;
   track it too.  */
476 bitmap_set_bit (si
->va_list_escape_vars
,
477 DECL_UID (SSA_NAME_VAR (lhs
)));
481 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
482 Return true if va_list might be escaping. */
/* Scans every statement in the function for uses of the tracked escape
   temporaries, classifying each use as a harmless dereference, a benign
   pointer copy/adjustment, a store back into the va_list variable, or an
   escape.  NOTE(review): the enclosing FOR_EACH_BB loop, several braces
   and the return statements are missing from this extraction.  */
485 check_all_va_list_escapes (struct stdarg_info
*si
)
491 gimple_stmt_iterator i
;
/* Walk all statements of the current block BB.  */
493 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
495 gimple stmt
= gsi_stmt (i
);
/* Debug statements never cause an escape.  */
499 if (is_gimple_debug (stmt
))
/* Examine every SSA operand of STMT.  */
502 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
/* Only operands in the tracked escape-vars set are interesting.  */
504 if (! bitmap_bit_p (si
->va_list_escape_vars
,
505 DECL_UID (SSA_NAME_VAR (use
))))
508 if (is_gimple_assign (stmt
))
510 tree rhs
= gimple_assign_rhs1 (stmt
);
511 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* A load *USE with a known constant offset from the va_start pointer:
   this is a va_arg fetch, and tells us how many GPR units the fetch
   consumes.  */
514 if (gimple_assign_rhs_code (stmt
) == INDIRECT_REF
515 && TREE_OPERAND (rhs
, 0) == use
516 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
517 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)), 1)
518 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
520 unsigned HOST_WIDE_INT gpr_size
;
521 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
/* Cached offset of USE plus the byte size of the access.  */
523 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
524 + tree_low_cst (access_size
, 1);
/* Raise cfun->va_list_gpr_size, saturating at the maximum.  */
525 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
526 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
527 else if (gpr_size
> cfun
->va_list_gpr_size
)
528 cfun
->va_list_gpr_size
= gpr_size
;
532 /* va_arg sequences may contain
533 other_ap_temp = ap_temp;
534 other_ap_temp = ap_temp + constant;
535 other_ap_temp = (some_type *) ap_temp;
/* NOTE(review): the rest of this comment and the leading operands of the
   condition below (orig. lines 536-538, 541) are missing.  */
539 && ((rhs_code
== POINTER_PLUS_EXPR
540 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
542 || gimple_assign_cast_p (stmt
)
543 || (get_gimple_rhs_class (rhs_code
)
544 == GIMPLE_SINGLE_RHS
)))
546 tree lhs
= gimple_assign_lhs (stmt
);
/* Copying into another tracked escape temporary is benign.  */
548 if (TREE_CODE (lhs
) == SSA_NAME
549 && bitmap_bit_p (si
->va_list_escape_vars
,
550 DECL_UID (SSA_NAME_VAR (lhs
))))
/* Storing back into the va_list variable itself is also benign
   (the normal end of a va_arg sequence).
   NOTE(review): the tail of this condition is missing.  */
553 if (TREE_CODE (lhs
) == VAR_DECL
554 && bitmap_bit_p (si
->va_list_vars
,
/* Any other use escapes; report it in the detailed dump.  */
560 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
562 fputs ("va_list escapes in ", dump_file
);
563 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
564 fputc ('\n', dump_file
);
575 /* Return true if this optimization pass should be done.
576 It makes only sense for stdarg functions. */
/* Pass gate: run only when the current function takes variable
   arguments (cfun->stdarg is set by the front end).  */
579 gate_optimize_stdarg (void)
581 /* This optimization is only for stdarg functions. */
582 return cfun
->stdarg
!= 0;
586 /* Entry point to the stdarg optimization pass. */
/* Overall flow: (1) find all va_start calls and record their va_list
   variables; (2) bail out (va_list_escapes) on anything unanalyzable;
   (3) walk every statement classifying va_list uses to compute
   cfun->va_list_gpr_size / cfun->va_list_fpr_size; (4) on escape, fall
   back to the conservative maxima.  NOTE(review): many lines (loop
   headers, braces, early exits — orig. 587-588, 590-591, 597-598,
   604-605, 613-615, ...) are missing from this extraction.  */
589 execute_optimize_stdarg (void)
592 bool va_list_escapes
= false;
593 bool va_list_simple_ptr
;
594 struct stdarg_info si
;
595 struct walk_stmt_info wi
;
596 const char *funcname
= NULL
;
/* Start from "no registers need saving" and grow as uses are found.  */
599 cfun
->va_list_gpr_size
= 0;
600 cfun
->va_list_fpr_size
= 0;
601 memset (&si
, 0, sizeof (si
));
602 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
603 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
606 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* Determine whether this ABI's va_list is a plain void*/char* pointer
   (single counter) or a structure with separate GPR/FPR counters.  */
608 cfun_va_list
= targetm
.fn_abi_va_list (cfun
->decl
);
609 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
610 && (TREE_TYPE (cfun_va_list
) == void_type_node
611 || TREE_TYPE (cfun_va_list
) == char_type_node
);
612 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
/* --- Phase 1: locate va_start calls and their va_list variables.  --- */
616 gimple_stmt_iterator i
;
618 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
620 gimple stmt
= gsi_stmt (i
);
/* Only builtin calls are of interest in this phase.  */
623 if (!is_gimple_call (stmt
))
626 callee
= gimple_call_fndecl (stmt
);
628 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
631 switch (DECL_FUNCTION_CODE (callee
))
633 case BUILT_IN_VA_START
:
635 /* If old style builtins are used, don't optimize anything. */
636 case BUILT_IN_SAVEREGS
:
637 case BUILT_IN_ARGS_INFO
:
638 case BUILT_IN_NEXT_ARG
:
639 va_list_escapes
= true;
/* va_start's argument is &ap (possibly &ap[0] for array va_list);
   strip the ADDR_EXPR/ARRAY_REF down to the VAR_DECL.  */
646 ap
= gimple_call_arg (stmt
, 0);
648 if (TREE_CODE (ap
) != ADDR_EXPR
)
650 va_list_escapes
= true;
653 ap
= TREE_OPERAND (ap
, 0);
654 if (TREE_CODE (ap
) == ARRAY_REF
)
656 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
658 va_list_escapes
= true;
661 ap
= TREE_OPERAND (ap
, 0);
/* The variable must be a local of exactly the ABI va_list type.  */
663 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
664 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (cfun
->decl
))
665 || TREE_CODE (ap
) != VAR_DECL
)
667 va_list_escapes
= true;
671 if (is_global_var (ap
))
673 va_list_escapes
= true;
/* Record AP as a tracked va_list variable.  */
677 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
));
679 /* VA_START_BB and VA_START_AP will be only used if there is just
680 one va_start in the function. */
689 /* If there were no va_start uses in the function, there is no need to
691 if (si
.va_start_count
== 0)
694 /* If some va_list arguments weren't local, we can't optimize. */
698 /* For void * or char * va_list, something useful can be done only
699 if there is just one va_start. */
700 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
702 va_list_escapes
= true;
706 /* For struct * va_list, if the backend didn't tell us what the counter fields
707 are, there is nothing more we can do. */
708 if (!va_list_simple_ptr
709 && va_list_gpr_counter_field
== NULL_TREE
710 && va_list_fpr_counter_field
== NULL_TREE
)
712 va_list_escapes
= true;
716 /* For void * or char * va_list there is just one counter
717 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
718 if (va_list_simple_ptr
)
719 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Dominance info is needed by reachable_at_most_once.  */
721 calculate_dominance_info (CDI_DOMINATORS
);
722 memset (&wi
, 0, sizeof (wi
));
723 wi
.info
= si
.va_list_vars
;
/* --- Phase 2: per-bb scan classifying every va_list use.  --- */
727 gimple_stmt_iterator i
;
/* Reset the per-bb "can sizes be computed precisely" decision.  */
729 si
.compute_sizes
= -1;
732 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
733 them as assignments for the purpose of escape analysis. This is
734 not needed for non-simple va_list because virtual phis don't perform
735 any real data movement. */
736 if (va_list_simple_ptr
)
742 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
744 gimple phi
= gsi_stmt (i
);
745 lhs
= PHI_RESULT (phi
);
/* Skip virtual PHIs.  */
747 if (!is_gimple_reg (lhs
))
/* Treat each PHI argument as an assignment lhs = rhs.  */
750 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
752 rhs
= USE_FROM_PTR (uop
);
753 if (va_list_ptr_read (&si
, rhs
, lhs
))
755 else if (va_list_ptr_write (&si
, lhs
, rhs
))
758 check_va_list_escapes (&si
, lhs
, rhs
);
760 if (si
.va_list_escapes
)
762 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
764 fputs ("va_list escapes in ", dump_file
);
765 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
766 fputc ('\n', dump_file
);
768 va_list_escapes
= true;
/* Scan the bb's statements; stop early once an escape is found.
   NOTE(review): the loop-increment clause (orig. lines 776-777) is
   missing from this extraction.  */
774 for (i
= gsi_start_bb (bb
);
775 !gsi_end_p (i
) && !va_list_escapes
;
778 gimple stmt
= gsi_stmt (i
);
780 /* Don't look at __builtin_va_{start,end}, they are ok. */
781 if (is_gimple_call (stmt
))
783 tree callee
= gimple_call_fndecl (stmt
);
786 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
787 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
788 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
792 if (is_gimple_assign (stmt
))
794 tree lhs
= gimple_assign_lhs (stmt
);
795 tree rhs
= gimple_assign_rhs1 (stmt
);
/* Simple-pointer va_list: classify reads, writes and escapes of
   the pointer itself.  */
797 if (va_list_simple_ptr
)
799 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
800 == GIMPLE_SINGLE_RHS
)
802 /* Check for tem = ap. */
803 if (va_list_ptr_read (&si
, rhs
, lhs
))
806 /* Check for the last insn in:
811 else if (va_list_ptr_write (&si
, lhs
, rhs
))
/* Pointer adjustments, casts and plain copies feed the escape
   analysis.  */
815 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
816 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
817 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
818 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
819 == GIMPLE_SINGLE_RHS
))
820 check_va_list_escapes (&si
, lhs
, rhs
);
/* Structure va_list: recognize counter-field loads and stores.  */
824 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
825 == GIMPLE_SINGLE_RHS
)
827 /* Check for ap[0].field = temp. */
828 if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
831 /* Check for temp = ap[0].field. */
832 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
837 /* Do any architecture specific checking. */
838 if (targetm
.stdarg_optimize_hook
839 && targetm
.stdarg_optimize_hook (&si
, stmt
))
843 else if (is_gimple_debug (stmt
))
846 /* All other uses of va_list are either va_copy (that is not handled
847 in this optimization), taking address of va_list variable or
848 passing va_list to other functions (in that case va_list might
849 escape the function and therefore va_start needs to set it up
850 fully), or some unexpected use of va_list. None of these should
851 happen in a gimplified VA_ARG_EXPR. */
852 if (si
.va_list_escapes
853 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
855 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
857 fputs ("va_list escapes in ", dump_file
);
858 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
859 fputc ('\n', dump_file
);
861 va_list_escapes
= true;
/* Final pass over all uses of the tracked escape temporaries.  */
869 if (! va_list_escapes
870 && va_list_simple_ptr
871 && ! bitmap_empty_p (si
.va_list_escape_vars
)
872 && check_all_va_list_escapes (&si
))
873 va_list_escapes
= true;
/* On escape, be conservative: all register save areas are needed.
   NOTE(review): the guarding condition (orig. lines 874-877) is
   missing from this extraction.  */
878 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
879 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
881 BITMAP_FREE (si
.va_list_vars
);
882 BITMAP_FREE (si
.va_list_escape_vars
);
/* Summary dump of the result.  */
886 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
887 funcname
, (int) va_list_escapes
);
888 if (cfun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
889 fputs ("all", dump_file
);
891 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
892 fputs (" GPR units and ", dump_file
);
893 if (cfun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
894 fputs ("all", dump_file
);
896 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
897 fputs (" FPR units.\n", dump_file
);
903 struct gimple_opt_pass pass_stdarg
=
908 gate_optimize_stdarg
, /* gate */
909 execute_optimize_stdarg
, /* execute */
912 0, /* static_pass_number */
914 PROP_cfg
| PROP_ssa
, /* properties_required */
915 0, /* properties_provided */
916 0, /* properties_destroyed */
917 0, /* todo_flags_start */
918 TODO_dump_func
/* todo_flags_finish */