1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "langhooks.h"
29 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-stdarg.h"
35 /* A simple pass that attempts to optimize stdarg functions on architectures
36 that need to save register arguments to stack on entry to stdarg functions.
37 If the function doesn't use any va_start macros, no registers need to
38 be saved. If va_start macros are used, the va_list variables don't escape
39 the function, it is only necessary to save registers that will be used
40 in va_arg macros. E.g. if va_arg is only used with integral types
41 in the function, floating point registers don't need to be saved, etc. */
44 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
45 is executed at most as many times as VA_START_BB. */
/* Return true iff basic block VA_ARG_BB is executed at most as many times
   as VA_START_BB: VA_START_BB must dominate VA_ARG_BB and a backward CFG
   walk from VA_ARG_BB must reach VA_START_BB without crossing complex
   edges or looping back to VA_ARG_BB.
   NOTE(review): this extraction is missing lines (the return type, braces,
   local declarations for the edge/iterator/sbitmap/result variables, and
   the early-return statements) -- verify against upstream tree-stdarg.c.  */
48 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Worklist of CFG edges still to be walked backwards.  */
50 VEC (edge
, heap
) *stack
= NULL
;
/* Trivially at-most-once when both are the same basic block.  */
56 if (va_arg_bb
== va_start_bb
)
/* If va_start's block does not dominate va_arg's block, va_arg can be
   reached on a path that never executed va_start.  */
59 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
/* Bitmap of basic-block indices already visited during the walk.  */
62 visited
= sbitmap_alloc (last_basic_block
);
63 sbitmap_zero (visited
);
/* Seed the worklist with every predecessor edge of va_arg_bb.  */
66 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
67 VEC_safe_push (edge
, heap
, stack
, e
);
/* Depth-first backward walk over the CFG.  */
69 while (! VEC_empty (edge
, stack
))
73 e
= VEC_pop (edge
, stack
);
/* Abnormal/EH edges make the execution-count relation unknowable.  */
76 if (e
->flags
& EDGE_COMPLEX
)
/* Reaching the va_start block terminates this path successfully;
   do not walk past it.  */
82 if (src
== va_start_bb
)
85 /* va_arg_bb can be executed more times than va_start_bb. */
/* Since va_start_bb dominates va_arg_bb, the backward walk must stop
   before ever reaching the function entry block.  */
92 gcc_assert (src
!= ENTRY_BLOCK_PTR
);
/* Push each block's predecessors exactly once.  */
94 if (! TEST_BIT (visited
, src
->index
))
96 SET_BIT (visited
, src
->index
);
97 FOR_EACH_EDGE (e
, ei
, src
->preds
)
98 VEC_safe_push (edge
, heap
, stack
, e
);
/* Release the worklist and the visited bitmap.  */
102 VEC_free (edge
, heap
, stack
);
103 sbitmap_free (visited
);
108 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
109 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
110 GPR_P is true if this is GPR counter. */
/* For statement COUNTER = RHS, walk the SSA def-use chain backwards from
   RHS; if RHS turns out to be COUNTER plus a constant, return that
   constant, otherwise return (unsigned HOST_WIDE_INT) -1.  GPR_P selects
   whether COUNTER is the GPR or the FPR save-area counter.  As a side
   effect, cache per-SSA-name offsets in SI->offsets for later queries.
   NOTE(review): this extraction is missing lines (braces, the trailing
   parameter on the signature, loop headers for the two walk loops, and
   the final return) -- verify against upstream tree-stdarg.c.  */
112 static unsigned HOST_WIDE_INT
113 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
118 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
119 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offset cache, initialized to -1
   (meaning "not yet computed") for every name.  */
121 if (si
->offsets
== NULL
)
125 si
->offsets
= XNEWVEC (int, num_ssa_names
);
126 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Current counter value and saturation limit for the selected
   (GPR vs. FPR) register save area.  */
130 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
131 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
132 orig_lhs
= lhs
= rhs
;
/* First walk: follow the def chain of LHS backwards, accumulating the
   constant increments, until COUNTER itself is found (or give up).  */
135 enum tree_code rhs_code
;
/* A cached offset lets us stop early and reuse the prior result.  */
137 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
139 if (counter_val
>= max_size
)
145 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
149 stmt
= SSA_NAME_DEF_STMT (lhs
);
/* Only plain assignments defining LHS can be looked through.  */
151 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
152 return (unsigned HOST_WIDE_INT
) -1;
154 rhs_code
= gimple_assign_rhs_code (stmt
);
/* Look through copies and casts of another SSA name.  */
155 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
156 || gimple_assign_cast_p (stmt
))
157 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
159 lhs
= gimple_assign_rhs1 (stmt
);
/* LHS = name + constant: accumulate the constant and keep walking.  */
163 if ((rhs_code
== POINTER_PLUS_EXPR
164 || rhs_code
== PLUS_EXPR
)
165 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
166 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
168 ret
+= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
169 lhs
= gimple_assign_rhs1 (stmt
);
/* Anything else must be a direct load of the counter itself.  */
173 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
174 return (unsigned HOST_WIDE_INT
) -1;
176 rhs
= gimple_assign_rhs1 (stmt
);
177 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
178 return (unsigned HOST_WIDE_INT
) -1;
/* For COMPONENT_REF counters, the base object and FIELD_DECL must
   match COUNTER exactly.  */
180 if (TREE_CODE (counter
) == COMPONENT_REF
)
182 if (get_base_address (counter
) != get_base_address (rhs
)
183 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
184 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
185 return (unsigned HOST_WIDE_INT
) -1;
187 else if (counter
!= rhs
)
188 return (unsigned HOST_WIDE_INT
) -1;
/* Second walk: replay the same chain, recording in SI->offsets the
   counter value each intermediate SSA name corresponds to.  */
194 val
= ret
+ counter_val
;
197 enum tree_code rhs_code
;
199 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
/* Saturate cached offsets at MAX_SIZE; exact values above the limit
   do not matter.  */
203 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
205 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
207 stmt
= SSA_NAME_DEF_STMT (lhs
);
209 rhs_code
= gimple_assign_rhs_code (stmt
);
/* Same copy/cast handling as the first walk.  */
210 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
211 || gimple_assign_cast_p (stmt
))
212 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
214 lhs
= gimple_assign_rhs1 (stmt
);
/* Walking backwards, subtract the increment added at this step.  */
218 if ((rhs_code
== POINTER_PLUS_EXPR
219 || rhs_code
== PLUS_EXPR
)
220 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
221 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
223 val
-= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
224 lhs
= gimple_assign_rhs1 (stmt
);
235 /* Called by walk_tree to look for references to va_list variables. */
/* walk_tree/walk_gimple_op callback: report whether *TP references one of
   the tracked va_list variables.  DATA is a walk_stmt_info whose `info'
   field carries the bitmap of va_list DECL_UIDs.
   NOTE(review): this extraction is missing lines (the return type, the
   final DATA parameter, braces, and the return statements) -- verify
   against upstream tree-stdarg.c.  */
238 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* The caller passes the va_list-variable bitmap through wi->info.  */
241 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
/* Look through SSA names to the underlying variable.  */
244 if (TREE_CODE (var
) == SSA_NAME
)
245 var
= SSA_NAME_VAR (var
);
/* A hit is a VAR_DECL whose DECL_UID is in the tracked set.  */
247 if (TREE_CODE (var
) == VAR_DECL
248 && bitmap_bit_p (va_list_vars
, DECL_UID (var
)))
255 /* Helper function of va_list_counter_struct_op. Compute
256 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
257 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
258 statement. GPR_P is true if AP is a GPR counter, false if it is
/* Helper of va_list_counter_struct_op: update cfun->va_list_{g,f}pr_size
   for AP, a va_list GPR/FPR counter field, seen in AP = VAR (WRITE_P) or
   VAR = AP (!WRITE_P).  GPR_P selects the GPR vs. FPR counter.
   NOTE(review): this extraction is missing lines (the trailing write_p
   parameter, braces, the fprintf call paired with the dump format
   strings, and some conditions) -- verify against upstream.  */
262 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
265 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not decided yet": decide it now.  Precise
   sizes can only be computed if this bb runs at most once per va_start.  */
267 if (si
->compute_sizes
< 0)
269 si
->compute_sizes
= 0;
270 if (si
->va_start_count
== 1
271 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
272 si
->compute_sizes
= 1;
/* Dump the decision when detailed dumping is requested.  */
274 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
276 "bb%d will %sbe executed at most once for each va_start "
277 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
278 si
->va_start_bb
->index
);
/* If the counter bump is a known constant (the "+ 1 > 1" idiom filters
   out the (unsigned HOST_WIDE_INT) -1 failure value and zero), add it
   to the appropriate save-area size, saturating at the maximum.  */
283 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
285 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
287 cfun
->va_list_gpr_size
+= increment
;
291 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
293 cfun
->va_list_fpr_size
+= increment
;
/* Otherwise give up on precision: a write we can't analyze, or a read
   in a bb that may repeat, forces the maximal save-area sizes.  */
298 if (write_p
|| !si
->compute_sizes
)
301 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
303 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
308 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
309 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
310 is false, AP has been seen in VAR = AP assignment.
311 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
312 va_arg operation that doesn't cause the va_list variable to escape
/* If AP is a va_list GPR/FPR counter (a COMPONENT_REF of a tracked
   va_list variable), compute cfun->va_list_{g,f}pr_size via
   va_list_counter_op.  WRITE_P distinguishes AP = VAR from VAR = AP.
   Returns whether the statement was recognized as a safe va_arg
   operation.
   NOTE(review): this extraction is missing lines (the return type, the
   trailing write_p parameter, braces, and return statements) -- verify
   against upstream.  */
316 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a field reference, e.g. ap.gp_offset.  */
321 if (TREE_CODE (ap
) != COMPONENT_REF
322 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* The other operand must be an SSA temporary that is not itself one of
   the tracked va_list variables.  */
325 if (TREE_CODE (var
) != SSA_NAME
326 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (var
))))
/* The base of the field reference must be a tracked va_list VAR_DECL.  */
329 base
= get_base_address (ap
);
330 if (TREE_CODE (base
) != VAR_DECL
331 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
)))
/* Dispatch on which backend-provided counter field is accessed.  */
334 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
335 va_list_counter_op (si
, ap
, var
, true, write_p
);
336 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
337 va_list_counter_op (si
, ap
, var
, false, write_p
);
343 /* Check for TEM = AP. Return true if found and the caller shouldn't
344 search for va_list references in the statement. */
/* Check for TEM = AP, where AP is a simple-pointer va_list variable.
   Return true if found, in which case the caller should not search the
   statement for further va_list references.
   NOTE(review): this extraction is missing lines (the return type,
   braces, return statements, and the fprintf call paired with the dump
   format strings) -- verify against upstream.  */
347 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be one of the tracked va_list VAR_DECLs.  */
349 if (TREE_CODE (ap
) != VAR_DECL
350 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM must be a function-local SSA temporary, distinct from the
   tracked va_list variables themselves.  */
353 if (TREE_CODE (tem
) != SSA_NAME
354 || bitmap_bit_p (si
->va_list_vars
,
355 DECL_UID (SSA_NAME_VAR (tem
)))
356 || is_global_var (SSA_NAME_VAR (tem
)))
/* compute_sizes < 0 means "not decided yet": decide it now, the same
   way va_list_counter_op does.  */
359 if (si
->compute_sizes
< 0)
361 si
->compute_sizes
= 0;
362 if (si
->va_start_count
== 1
363 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
364 si
->compute_sizes
= 1;
366 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
368 "bb%d will %sbe executed at most once for each va_start "
369 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
370 si
->va_start_bb
->index
);
373 /* For void * or char * va_list types, there is just one counter.
374 If va_arg is used in a loop, we don't know how many registers need
to be saved (NOTE(review): comment appears truncated in this extraction). */
376 if (! si
->compute_sizes
)
/* The pointer va_list itself acts as the GPR counter; a bump we cannot
   resolve to a constant means we cannot track this read precisely.  */
379 if (va_list_counter_bump (si
, ap
, tem
, true) == (unsigned HOST_WIDE_INT
) -1)
382 /* Note the temporary, as we need to track whether it doesn't escape
383 the current function. */
384 bitmap_set_bit (si
->va_list_escape_vars
,
385 DECL_UID (SSA_NAME_VAR (tem
)));
394 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* Check for AP = TEM2 closing a simple-pointer va_arg sequence and update
   cfun->va_list_gpr_size accordingly.  Return true if found.
   NOTE(review): this extraction is missing lines (the return type,
   braces, return statements, and the else keyword before the saturating
   assignment) -- verify against upstream.  */
397 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
399 unsigned HOST_WIDE_INT increment
;
/* AP must be one of the tracked va_list VAR_DECLs.  */
401 if (TREE_CODE (ap
) != VAR_DECL
402 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM2 must be an SSA temporary distinct from the tracked va_lists.  */
405 if (TREE_CODE (tem2
) != SSA_NAME
406 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (tem2
))))
/* Without the at-most-once guarantee, precise sizes can't be computed.  */
409 if (si
->compute_sizes
<= 0)
/* How much TEM2 advanced past AP; "+ 1 <= 1" filters out the
   (unsigned HOST_WIDE_INT) -1 failure value and zero.  */
412 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
413 if (increment
+ 1 <= 1)
/* Accumulate into the GPR save-area size, saturating at the maximum.  */
416 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
417 cfun
->va_list_gpr_size
+= increment
;
419 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
425 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
426 containing value of some va_list variable plus optionally some constant,
427 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
428 depending whether LHS is a function local temporary. */
/* If RHS is X, (some type *) X or X + CST for X a temporary holding a
   va_list value (a member of si->va_list_escape_vars), either flag
   si->va_list_escapes or add LHS to si->va_list_escape_vars, depending on
   whether LHS is a function-local temporary.
   NOTE(review): this extraction is missing lines (the return type,
   braces, returns, and the fprintf paired with the dump format
   strings) -- verify against upstream.  */
431 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-typed values can carry a va_list pointer onward.  */
433 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* RHS must be one of the temporaries already known to hold a va_list
   derived value.  */
436 if (TREE_CODE (rhs
) != SSA_NAME
437 || ! bitmap_bit_p (si
->va_list_escape_vars
,
438 DECL_UID (SSA_NAME_VAR (rhs
))))
/* Storing into anything but a local SSA temporary is an escape.  */
441 if (TREE_CODE (lhs
) != SSA_NAME
|| is_global_var (SSA_NAME_VAR (lhs
)))
443 si
->va_list_escapes
= true;
/* compute_sizes < 0 means "not decided yet": decide it now, the same
   way va_list_counter_op does.  */
447 if (si
->compute_sizes
< 0)
449 si
->compute_sizes
= 0;
450 if (si
->va_start_count
== 1
451 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
452 si
->compute_sizes
= 1;
454 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
456 "bb%d will %sbe executed at most once for each va_start "
457 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
458 si
->va_start_bb
->index
);
461 /* For void * or char * va_list types, there is just one counter.
462 If va_arg is used in a loop, we don't know how many registers need
to be saved (NOTE(review): comment appears truncated in this extraction). */
464 if (! si
->compute_sizes
)
466 si
->va_list_escapes
= true;
/* If the offset of LHS from the va_start pointer can't be resolved to
   a constant, treat it as an escape.  */
470 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
471 == (unsigned HOST_WIDE_INT
) -1)
473 si
->va_list_escapes
= true;
/* Otherwise LHS is another local temporary to keep tracking.  */
477 bitmap_set_bit (si
->va_list_escape_vars
,
478 DECL_UID (SSA_NAME_VAR (lhs
)));
482 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
483 Return true if va_list might be escaping. */
/* Scan every use of the temporaries in si->va_list_escape_vars across the
   whole function.  Return true if the va_list might escape; as a side
   effect, grow cfun->va_list_gpr_size for recognized dereferences.
   NOTE(review): this extraction is missing lines (the return type, the
   FOR_EACH_BB loop header, braces, continue/return statements and some
   conditions) -- verify against upstream.  */
486 check_all_va_list_escapes (struct stdarg_info
*si
)
492 gimple_stmt_iterator i
;
/* Walk every statement of the basic block.  */
494 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
496 gimple stmt
= gsi_stmt (i
);
/* Debug statements never cause real escapes.  */
500 if (is_gimple_debug (stmt
))
/* Examine each SSA operand that is one of the tracked temporaries.  */
503 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
505 if (! bitmap_bit_p (si
->va_list_escape_vars
,
506 DECL_UID (SSA_NAME_VAR (use
))))
509 if (is_gimple_assign (stmt
))
511 tree rhs
= gimple_assign_rhs1 (stmt
);
512 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* x = *(ap_temp + const): a load through the tracked pointer with a
   known offset lets us bound the GPR save-area size precisely.  */
515 if (gimple_assign_rhs_code (stmt
) == MEM_REF
516 && TREE_OPERAND (rhs
, 0) == use
517 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
518 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)), 1)
519 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
521 unsigned HOST_WIDE_INT gpr_size
;
522 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
/* Offset of the pointer from va_start, plus the MEM_REF offset,
   plus the size of the accessed object.  */
524 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
525 + tree_low_cst (TREE_OPERAND (rhs
, 1), 0)
526 + tree_low_cst (access_size
, 1);
527 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
528 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
529 else if (gpr_size
> cfun
->va_list_gpr_size
)
530 cfun
->va_list_gpr_size
= gpr_size
;
534 /* va_arg sequences may contain
535 other_ap_temp = ap_temp;
536 other_ap_temp = ap_temp + constant;
537 other_ap_temp = (some_type *) ap_temp;
   -- such copies/casts/bumps are safe as long as their result stays
   local (NOTE(review): surrounding condition lines appear missing).  */
541 && ((rhs_code
== POINTER_PLUS_EXPR
542 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
544 || gimple_assign_cast_p (stmt
)
545 || (get_gimple_rhs_class (rhs_code
)
546 == GIMPLE_SINGLE_RHS
)))
548 tree lhs
= gimple_assign_lhs (stmt
);
/* Copying into another tracked temporary is fine.  */
550 if (TREE_CODE (lhs
) == SSA_NAME
551 && bitmap_bit_p (si
->va_list_escape_vars
,
552 DECL_UID (SSA_NAME_VAR (lhs
))))
/* Storing back into a tracked va_list variable is also fine.  */
555 if (TREE_CODE (lhs
) == VAR_DECL
556 && bitmap_bit_p (si
->va_list_vars
,
/* Any other use means the va_list value escapes: report it.  */
562 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
564 fputs ("va_list escapes in ", dump_file
);
565 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
566 fputc ('\n', dump_file
);
577 /* Return true if this optimization pass should be done.
578 It makes only sense for stdarg functions. */
581 gate_optimize_stdarg (void)
583 /* This optimization is only for stdarg functions. */
584 return cfun
->stdarg
!= 0;
588 /* Entry point to the stdarg optimization pass. */
/* Entry point of the pass: find all va_start uses, verify the va_list
   variables are local and non-escaping, and compute minimal
   cfun->va_list_gpr_size / cfun->va_list_fpr_size; on any failure fall
   back to the maximal sizes.
   NOTE(review): this extraction is missing many lines (the return type,
   braces, FOR_EACH_BB loop headers, several locals such as cfun_va_list
   and bb, goto/continue/break statements, labels, and the final return)
   -- verify against upstream tree-stdarg.c.  */
591 execute_optimize_stdarg (void)
594 bool va_list_escapes
= false;
595 bool va_list_simple_ptr
;
596 struct stdarg_info si
;
597 struct walk_stmt_info wi
;
598 const char *funcname
= NULL
;
/* Start from empty save areas and empty analysis state.  */
601 cfun
->va_list_gpr_size
= 0;
602 cfun
->va_list_fpr_size
= 0;
603 memset (&si
, 0, sizeof (si
));
604 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
605 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
608 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* Classify the target ABI's va_list: a plain void*/char* pointer gets
   simpler single-counter handling than a struct va_list.  */
610 cfun_va_list
= targetm
.fn_abi_va_list (cfun
->decl
);
611 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
612 && (TREE_TYPE (cfun_va_list
) == void_type_node
613 || TREE_TYPE (cfun_va_list
) == char_type_node
);
614 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
/* Phase 1: locate every va_start call and record the va_list variables
   it initializes.  */
618 gimple_stmt_iterator i
;
620 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
622 gimple stmt
= gsi_stmt (i
);
625 if (!is_gimple_call (stmt
))
628 callee
= gimple_call_fndecl (stmt
);
/* Only normal builtins are of interest here.  */
630 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
633 switch (DECL_FUNCTION_CODE (callee
))
635 case BUILT_IN_VA_START
:
637 /* If old style builtins are used, don't optimize anything. */
638 case BUILT_IN_SAVEREGS
:
639 case BUILT_IN_ARGS_INFO
:
640 case BUILT_IN_NEXT_ARG
:
641 va_list_escapes
= true;
/* Peel the &ap (and &ap[0] for array va_lists) from the va_start
   argument to get at the underlying variable.  */
648 ap
= gimple_call_arg (stmt
, 0);
650 if (TREE_CODE (ap
) != ADDR_EXPR
)
652 va_list_escapes
= true;
655 ap
= TREE_OPERAND (ap
, 0);
656 if (TREE_CODE (ap
) == ARRAY_REF
)
658 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
660 va_list_escapes
= true;
663 ap
= TREE_OPERAND (ap
, 0);
/* The operand must be a VAR_DECL of the ABI's va_list type.  */
665 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
666 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (cfun
->decl
))
667 || TREE_CODE (ap
) != VAR_DECL
)
669 va_list_escapes
= true;
/* A global va_list can be modified from anywhere: give up.  */
673 if (is_global_var (ap
))
675 va_list_escapes
= true;
679 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
));
681 /* VA_START_BB and VA_START_AP will be only used if there is just
682 one va_start in the function. */
691 /* If there were no va_start uses in the function, there is no need to
save anything (NOTE(review): comment appears truncated in this extraction). */
693 if (si
.va_start_count
== 0)
696 /* If some va_list arguments weren't local, we can't optimize. */
700 /* For void * or char * va_list, something useful can be done only
701 if there is just one va_start. */
702 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
704 va_list_escapes
= true;
708 /* For struct * va_list, if the backend didn't tell us what the counter fields
709 are, there is nothing more we can do. */
710 if (!va_list_simple_ptr
711 && va_list_gpr_counter_field
== NULL_TREE
712 && va_list_fpr_counter_field
== NULL_TREE
)
714 va_list_escapes
= true;
718 /* For void * or char * va_list there is just one counter
719 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
720 if (va_list_simple_ptr
)
721 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Phase 2 needs dominator info (for reachable_at_most_once) and a
   walk_stmt_info carrying the va_list-variable bitmap.  */
723 calculate_dominance_info (CDI_DOMINATORS
);
724 memset (&wi
, 0, sizeof (wi
));
725 wi
.info
= si
.va_list_vars
;
729 gimple_stmt_iterator i
;
/* Recomputed lazily per basic block by the helpers.  */
731 si
.compute_sizes
= -1;
734 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
735 them as assignments for the purpose of escape analysis. This is
736 not needed for non-simple va_list because virtual phis don't perform
737 any real data movement. */
738 if (va_list_simple_ptr
)
744 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
746 gimple phi
= gsi_stmt (i
);
747 lhs
= PHI_RESULT (phi
);
749 if (!is_gimple_reg (lhs
))
/* Treat every PHI argument edge as LHS = RHS for the analysis.  */
752 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
754 rhs
= USE_FROM_PTR (uop
);
755 if (va_list_ptr_read (&si
, rhs
, lhs
))
757 else if (va_list_ptr_write (&si
, lhs
, rhs
))
760 check_va_list_escapes (&si
, lhs
, rhs
);
762 if (si
.va_list_escapes
)
764 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
766 fputs ("va_list escapes in ", dump_file
);
767 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
768 fputc ('\n', dump_file
);
770 va_list_escapes
= true;
/* Now scan the ordinary statements of the block.  */
776 for (i
= gsi_start_bb (bb
);
777 !gsi_end_p (i
) && !va_list_escapes
;
780 gimple stmt
= gsi_stmt (i
);
782 /* Don't look at __builtin_va_{start,end}, they are ok. */
783 if (is_gimple_call (stmt
))
785 tree callee
= gimple_call_fndecl (stmt
);
788 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
789 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
790 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
794 if (is_gimple_assign (stmt
))
796 tree lhs
= gimple_assign_lhs (stmt
);
797 tree rhs
= gimple_assign_rhs1 (stmt
);
799 if (va_list_simple_ptr
)
801 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
802 == GIMPLE_SINGLE_RHS
)
804 /* Check for tem = ap. */
805 if (va_list_ptr_read (&si
, rhs
, lhs
))
808 /* Check for the last insn in:
ap = tem2 sequences (NOTE(review): example lines appear truncated). */
813 else if (va_list_ptr_write (&si
, lhs
, rhs
))
/* Copies, casts and constant pointer bumps may propagate the
   va_list value to other temporaries; track or flag them.  */
817 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
818 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
819 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
820 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
821 == GIMPLE_SINGLE_RHS
))
822 check_va_list_escapes (&si
, lhs
, rhs
);
/* Struct va_list: recognize counter-field reads and writes.  */
826 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
827 == GIMPLE_SINGLE_RHS
)
829 /* Check for ap[0].field = temp. */
830 if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
833 /* Check for temp = ap[0].field. */
834 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
839 /* Do any architecture specific checking. */
840 if (targetm
.stdarg_optimize_hook
841 && targetm
.stdarg_optimize_hook (&si
, stmt
))
845 else if (is_gimple_debug (stmt
))
848 /* All other uses of va_list are either va_copy (that is not handled
849 in this optimization), taking address of va_list variable or
850 passing va_list to other functions (in that case va_list might
851 escape the function and therefore va_start needs to set it up
852 fully), or some unexpected use of va_list. None of these should
853 happen in a gimplified VA_ARG_EXPR. */
854 if (si
.va_list_escapes
855 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
857 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
859 fputs ("va_list escapes in ", dump_file
);
860 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
861 fputc ('\n', dump_file
);
863 va_list_escapes
= true;
/* Final pass over the escape temporaries for simple-pointer va_lists.  */
871 if (! va_list_escapes
872 && va_list_simple_ptr
873 && ! bitmap_empty_p (si
.va_list_escape_vars
)
874 && check_all_va_list_escapes (&si
))
875 va_list_escapes
= true;
/* On any escape, conservatively save everything.  */
880 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
881 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
883 BITMAP_FREE (si
.va_list_vars
);
884 BITMAP_FREE (si
.va_list_escape_vars
);
/* Emit a summary line in the dump file.  */
888 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
889 funcname
, (int) va_list_escapes
);
890 if (cfun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
891 fputs ("all", dump_file
);
893 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
894 fputs (" GPR units and ", dump_file
);
895 if (cfun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
896 fputs ("all", dump_file
);
898 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
899 fputs (" FPR units.\n", dump_file
);
905 struct gimple_opt_pass pass_stdarg
=
910 gate_optimize_stdarg
, /* gate */
911 execute_optimize_stdarg
, /* execute */
914 0, /* static_pass_number */
916 PROP_cfg
| PROP_ssa
, /* properties_required */
917 0, /* properties_provided */
918 0, /* properties_destroyed */
919 0, /* todo_flags_start */
920 TODO_dump_func
/* todo_flags_finish */