/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
24 #include "coretypes.h"
28 #include "langhooks.h"
29 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used, the va_list variables don't escape
   the function, it is only necessary to save registers that will be used
   in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
44 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
45 is executed at most as many times as VA_START_BB. */
48 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
56 if (va_arg_bb
== va_start_bb
)
59 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
62 stack
= XNEWVEC (edge
, n_basic_blocks
+ 1);
65 visited
= sbitmap_alloc (last_basic_block
);
66 sbitmap_zero (visited
);
69 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
80 if (e
->flags
& EDGE_COMPLEX
)
86 if (src
== va_start_bb
)
89 /* va_arg_bb can be executed more times than va_start_bb. */
96 gcc_assert (src
!= ENTRY_BLOCK_PTR
);
98 if (! TEST_BIT (visited
, src
->index
))
100 SET_BIT (visited
, src
->index
);
101 FOR_EACH_EDGE (e
, ei
, src
->preds
)
107 sbitmap_free (visited
);
112 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
113 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
114 GPR_P is true if this is GPR counter. */
116 static unsigned HOST_WIDE_INT
117 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
120 tree stmt
, lhs
, orig_lhs
;
121 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
122 unsigned int max_size
;
124 if (si
->offsets
== NULL
)
128 si
->offsets
= XNEWVEC (int, num_ssa_names
);
129 for (i
= 0; i
< num_ssa_names
; ++i
)
133 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
134 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
135 orig_lhs
= lhs
= rhs
;
138 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
140 if (counter_val
>= max_size
)
146 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
150 stmt
= SSA_NAME_DEF_STMT (lhs
);
152 if (TREE_CODE (stmt
) != GIMPLE_MODIFY_STMT
153 || GIMPLE_STMT_OPERAND (stmt
, 0) != lhs
)
154 return (unsigned HOST_WIDE_INT
) -1;
156 rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
157 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
158 rhs
= TREE_OPERAND (rhs
, 0);
160 if (TREE_CODE (rhs
) == SSA_NAME
)
166 if ((TREE_CODE (rhs
) == NOP_EXPR
167 || TREE_CODE (rhs
) == CONVERT_EXPR
)
168 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
170 lhs
= TREE_OPERAND (rhs
, 0);
174 if (TREE_CODE (rhs
) == PLUS_EXPR
175 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
176 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
177 && host_integerp (TREE_OPERAND (rhs
, 1), 1))
179 ret
+= tree_low_cst (TREE_OPERAND (rhs
, 1), 1);
180 lhs
= TREE_OPERAND (rhs
, 0);
184 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
185 return (unsigned HOST_WIDE_INT
) -1;
187 if (TREE_CODE (counter
) == COMPONENT_REF
)
189 if (get_base_address (counter
) != get_base_address (rhs
)
190 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
191 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
192 return (unsigned HOST_WIDE_INT
) -1;
194 else if (counter
!= rhs
)
195 return (unsigned HOST_WIDE_INT
) -1;
201 val
= ret
+ counter_val
;
204 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
208 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
210 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
212 stmt
= SSA_NAME_DEF_STMT (lhs
);
214 rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
215 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
216 rhs
= TREE_OPERAND (rhs
, 0);
218 if (TREE_CODE (rhs
) == SSA_NAME
)
224 if ((TREE_CODE (rhs
) == NOP_EXPR
225 || TREE_CODE (rhs
) == CONVERT_EXPR
)
226 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
228 lhs
= TREE_OPERAND (rhs
, 0);
232 if (TREE_CODE (rhs
) == PLUS_EXPR
233 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
234 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
235 && host_integerp (TREE_OPERAND (rhs
, 1), 1))
237 val
-= tree_low_cst (TREE_OPERAND (rhs
, 1), 1);
238 lhs
= TREE_OPERAND (rhs
, 0);
249 /* Called by walk_tree to look for references to va_list variables. */
252 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
255 bitmap va_list_vars
= (bitmap
) data
;
258 if (TREE_CODE (var
) == SSA_NAME
)
259 var
= SSA_NAME_VAR (var
);
261 if (TREE_CODE (var
) == VAR_DECL
262 && bitmap_bit_p (va_list_vars
, DECL_UID (var
)))
269 /* Helper function of va_list_counter_struct_op. Compute
270 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
271 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
272 statement. GPR_P is true if AP is a GPR counter, false if it is
276 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
279 unsigned HOST_WIDE_INT increment
;
281 if (si
->compute_sizes
< 0)
283 si
->compute_sizes
= 0;
284 if (si
->va_start_count
== 1
285 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
286 si
->compute_sizes
= 1;
288 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
290 "bb%d will %sbe executed at most once for each va_start "
291 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
292 si
->va_start_bb
->index
);
297 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
299 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
301 cfun
->va_list_gpr_size
+= increment
;
305 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
307 cfun
->va_list_fpr_size
+= increment
;
312 if (write_p
|| !si
->compute_sizes
)
315 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
317 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
322 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
323 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
324 is false, AP has been seen in VAR = AP assignment.
325 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
326 va_arg operation that doesn't cause the va_list variable to escape
330 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
335 if (TREE_CODE (ap
) != COMPONENT_REF
336 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
339 if (TREE_CODE (var
) != SSA_NAME
340 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (var
))))
343 base
= get_base_address (ap
);
344 if (TREE_CODE (base
) != VAR_DECL
345 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
)))
348 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
349 va_list_counter_op (si
, ap
, var
, true, write_p
);
350 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
351 va_list_counter_op (si
, ap
, var
, false, write_p
);
357 /* Check for TEM = AP. Return true if found and the caller shouldn't
358 search for va_list references in the statement. */
361 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
363 if (TREE_CODE (ap
) != VAR_DECL
364 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
367 if (TREE_CODE (tem
) != SSA_NAME
368 || bitmap_bit_p (si
->va_list_vars
,
369 DECL_UID (SSA_NAME_VAR (tem
)))
370 || is_global_var (SSA_NAME_VAR (tem
)))
373 if (si
->compute_sizes
< 0)
375 si
->compute_sizes
= 0;
376 if (si
->va_start_count
== 1
377 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
378 si
->compute_sizes
= 1;
380 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
382 "bb%d will %sbe executed at most once for each va_start "
383 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
384 si
->va_start_bb
->index
);
387 /* For void * or char * va_list types, there is just one counter.
388 If va_arg is used in a loop, we don't know how many registers need
390 if (! si
->compute_sizes
)
393 if (va_list_counter_bump (si
, ap
, tem
, true) == (unsigned HOST_WIDE_INT
) -1)
396 /* Note the temporary, as we need to track whether it doesn't escape
397 the current function. */
398 bitmap_set_bit (si
->va_list_escape_vars
,
399 DECL_UID (SSA_NAME_VAR (tem
)));
408 sequence and update cfun->va_list_gpr_size. Return true if found. */
411 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
413 unsigned HOST_WIDE_INT increment
;
415 if (TREE_CODE (ap
) != VAR_DECL
416 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
419 if (TREE_CODE (tem2
) != SSA_NAME
420 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (tem2
))))
423 if (si
->compute_sizes
<= 0)
426 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
427 if (increment
+ 1 <= 1)
430 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
431 cfun
->va_list_gpr_size
+= increment
;
433 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
439 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
440 containing value of some va_list variable plus optionally some constant,
441 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
442 depending whether LHS is a function local temporary. */
445 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
447 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
450 if ((TREE_CODE (rhs
) == PLUS_EXPR
451 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
)
452 || TREE_CODE (rhs
) == NOP_EXPR
453 || TREE_CODE (rhs
) == CONVERT_EXPR
)
454 rhs
= TREE_OPERAND (rhs
, 0);
456 if (TREE_CODE (rhs
) != SSA_NAME
457 || ! bitmap_bit_p (si
->va_list_escape_vars
,
458 DECL_UID (SSA_NAME_VAR (rhs
))))
461 if (TREE_CODE (lhs
) != SSA_NAME
|| is_global_var (SSA_NAME_VAR (lhs
)))
463 si
->va_list_escapes
= true;
467 if (si
->compute_sizes
< 0)
469 si
->compute_sizes
= 0;
470 if (si
->va_start_count
== 1
471 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
472 si
->compute_sizes
= 1;
474 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
476 "bb%d will %sbe executed at most once for each va_start "
477 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
478 si
->va_start_bb
->index
);
481 /* For void * or char * va_list types, there is just one counter.
482 If va_arg is used in a loop, we don't know how many registers need
484 if (! si
->compute_sizes
)
486 si
->va_list_escapes
= true;
490 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
491 == (unsigned HOST_WIDE_INT
) -1)
493 si
->va_list_escapes
= true;
497 bitmap_set_bit (si
->va_list_escape_vars
,
498 DECL_UID (SSA_NAME_VAR (lhs
)));
502 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
503 Return true if va_list might be escaping. */
506 check_all_va_list_escapes (struct stdarg_info
*si
)
512 block_stmt_iterator i
;
514 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
516 tree stmt
= bsi_stmt (i
), use
;
519 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
521 if (! bitmap_bit_p (si
->va_list_escape_vars
,
522 DECL_UID (SSA_NAME_VAR (use
))))
525 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
)
527 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
528 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
530 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
531 rhs
= TREE_OPERAND (rhs
, 0);
534 if (TREE_CODE (rhs
) == INDIRECT_REF
535 && TREE_OPERAND (rhs
, 0) == use
536 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
537 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)), 1)
538 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
540 unsigned HOST_WIDE_INT gpr_size
;
541 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
543 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
544 + tree_low_cst (access_size
, 1);
545 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
546 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
547 else if (gpr_size
> cfun
->va_list_gpr_size
)
548 cfun
->va_list_gpr_size
= gpr_size
;
552 /* va_arg sequences may contain
553 other_ap_temp = ap_temp;
554 other_ap_temp = ap_temp + constant;
555 other_ap_temp = (some_type *) ap_temp;
558 if ((TREE_CODE (rhs
) == PLUS_EXPR
559 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
)
560 || TREE_CODE (rhs
) == NOP_EXPR
561 || TREE_CODE (rhs
) == CONVERT_EXPR
)
562 rhs
= TREE_OPERAND (rhs
, 0);
566 if (TREE_CODE (lhs
) == SSA_NAME
567 && bitmap_bit_p (si
->va_list_escape_vars
,
568 DECL_UID (SSA_NAME_VAR (lhs
))))
571 if (TREE_CODE (lhs
) == VAR_DECL
572 && bitmap_bit_p (si
->va_list_vars
,
578 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
580 fputs ("va_list escapes in ", dump_file
);
581 print_generic_expr (dump_file
, stmt
, dump_flags
);
582 fputc ('\n', dump_file
);
593 /* Return true if this optimization pass should be done.
594 It makes only sense for stdarg functions. */
597 gate_optimize_stdarg (void)
599 /* This optimization is only for stdarg functions. */
600 return current_function_stdarg
!= 0;
604 /* Entry point to the stdarg optimization pass. */
607 execute_optimize_stdarg (void)
610 bool va_list_escapes
= false;
611 bool va_list_simple_ptr
;
612 struct stdarg_info si
;
613 const char *funcname
= NULL
;
615 cfun
->va_list_gpr_size
= 0;
616 cfun
->va_list_fpr_size
= 0;
617 memset (&si
, 0, sizeof (si
));
618 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
619 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
622 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
624 va_list_simple_ptr
= POINTER_TYPE_P (va_list_type_node
)
625 && (TREE_TYPE (va_list_type_node
) == void_type_node
626 || TREE_TYPE (va_list_type_node
) == char_type_node
);
627 gcc_assert (is_gimple_reg_type (va_list_type_node
) == va_list_simple_ptr
);
631 block_stmt_iterator i
;
633 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
635 tree stmt
= bsi_stmt (i
);
636 tree call
= get_call_expr_in (stmt
), callee
;
642 callee
= get_callee_fndecl (call
);
644 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
647 switch (DECL_FUNCTION_CODE (callee
))
649 case BUILT_IN_VA_START
:
651 /* If old style builtins are used, don't optimize anything. */
652 case BUILT_IN_SAVEREGS
:
653 case BUILT_IN_STDARG_START
:
654 case BUILT_IN_ARGS_INFO
:
655 case BUILT_IN_NEXT_ARG
:
656 va_list_escapes
= true;
663 ap
= CALL_EXPR_ARG (call
, 0);
665 if (TREE_CODE (ap
) != ADDR_EXPR
)
667 va_list_escapes
= true;
670 ap
= TREE_OPERAND (ap
, 0);
671 if (TREE_CODE (ap
) == ARRAY_REF
)
673 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
675 va_list_escapes
= true;
678 ap
= TREE_OPERAND (ap
, 0);
680 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
681 != TYPE_MAIN_VARIANT (va_list_type_node
)
682 || TREE_CODE (ap
) != VAR_DECL
)
684 va_list_escapes
= true;
688 if (is_global_var (ap
))
690 va_list_escapes
= true;
694 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
));
696 /* VA_START_BB and VA_START_AP will be only used if there is just
697 one va_start in the function. */
706 /* If there were no va_start uses in the function, there is no need to
708 if (si
.va_start_count
== 0)
711 /* If some va_list arguments weren't local, we can't optimize. */
715 /* For void * or char * va_list, something useful can be done only
716 if there is just one va_start. */
717 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
719 va_list_escapes
= true;
723 /* For struct * va_list, if the backend didn't tell us what the counter fields
724 are, there is nothing more we can do. */
725 if (!va_list_simple_ptr
726 && va_list_gpr_counter_field
== NULL_TREE
727 && va_list_fpr_counter_field
== NULL_TREE
)
729 va_list_escapes
= true;
733 /* For void * or char * va_list there is just one counter
734 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
735 if (va_list_simple_ptr
)
736 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
738 calculate_dominance_info (CDI_DOMINATORS
);
742 block_stmt_iterator i
;
744 si
.compute_sizes
= -1;
747 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
748 them as assignments for the purpose of escape analysis. This is
749 not needed for non-simple va_list because virtual phis don't perform
750 any real data movement. */
751 if (va_list_simple_ptr
)
757 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
759 lhs
= PHI_RESULT (phi
);
761 if (!is_gimple_reg (lhs
))
764 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
766 rhs
= USE_FROM_PTR (uop
);
767 if (va_list_ptr_read (&si
, rhs
, lhs
))
769 else if (va_list_ptr_write (&si
, lhs
, rhs
))
772 check_va_list_escapes (&si
, lhs
, rhs
);
774 if (si
.va_list_escapes
775 || walk_tree (&phi
, find_va_list_reference
,
776 si
.va_list_vars
, NULL
))
778 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
780 fputs ("va_list escapes in ", dump_file
);
781 print_generic_expr (dump_file
, phi
, dump_flags
);
782 fputc ('\n', dump_file
);
784 va_list_escapes
= true;
790 for (i
= bsi_start (bb
);
791 !bsi_end_p (i
) && !va_list_escapes
;
794 tree stmt
= bsi_stmt (i
);
797 /* Don't look at __builtin_va_{start,end}, they are ok. */
798 call
= get_call_expr_in (stmt
);
801 tree callee
= get_callee_fndecl (call
);
804 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
805 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
806 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
810 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
)
812 tree lhs
= GIMPLE_STMT_OPERAND (stmt
, 0);
813 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
815 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
816 rhs
= TREE_OPERAND (rhs
, 0);
818 if (va_list_simple_ptr
)
820 /* Check for tem = ap. */
821 if (va_list_ptr_read (&si
, rhs
, lhs
))
824 /* Check for the last insn in:
829 else if (va_list_ptr_write (&si
, lhs
, rhs
))
833 check_va_list_escapes (&si
, lhs
, rhs
);
837 /* Check for ap[0].field = temp. */
838 if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
841 /* Check for temp = ap[0].field. */
842 else if (va_list_counter_struct_op (&si
, rhs
, lhs
, false))
845 /* Do any architecture specific checking. */
846 else if (targetm
.stdarg_optimize_hook
847 && targetm
.stdarg_optimize_hook (&si
, lhs
, rhs
))
852 /* All other uses of va_list are either va_copy (that is not handled
853 in this optimization), taking address of va_list variable or
854 passing va_list to other functions (in that case va_list might
855 escape the function and therefore va_start needs to set it up
856 fully), or some unexpected use of va_list. None of these should
857 happen in a gimplified VA_ARG_EXPR. */
858 if (si
.va_list_escapes
859 || walk_tree (&stmt
, find_va_list_reference
,
860 si
.va_list_vars
, NULL
))
862 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
864 fputs ("va_list escapes in ", dump_file
);
865 print_generic_expr (dump_file
, stmt
, dump_flags
);
866 fputc ('\n', dump_file
);
868 va_list_escapes
= true;
876 if (! va_list_escapes
877 && va_list_simple_ptr
878 && ! bitmap_empty_p (si
.va_list_escape_vars
)
879 && check_all_va_list_escapes (&si
))
880 va_list_escapes
= true;
885 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
886 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
888 BITMAP_FREE (si
.va_list_vars
);
889 BITMAP_FREE (si
.va_list_escape_vars
);
893 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
894 funcname
, (int) va_list_escapes
);
895 if (cfun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
896 fputs ("all", dump_file
);
898 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
899 fputs (" GPR units and ", dump_file
);
900 if (cfun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
901 fputs ("all", dump_file
);
903 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
904 fputs (" FPR units.\n", dump_file
);
910 struct tree_opt_pass pass_stdarg
=
913 gate_optimize_stdarg
, /* gate */
914 execute_optimize_stdarg
, /* execute */
917 0, /* static_pass_number */
919 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
920 0, /* properties_provided */
921 0, /* properties_destroyed */
922 0, /* todo_flags_start */
923 TODO_dump_func
, /* todo_flags_finish */