1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "langhooks.h"
28 #include "diagnostic.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-stdarg.h"
34 /* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
43 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
44 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this extraction is lossy -- the fused original line
   numbers jump (47, 55, 58, ...), so statements are missing and the
   fragments below are annotated in place rather than reformatted.  */
47 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Same block: trivially executed equally often.  */
55 if (va_arg_bb
== va_start_bb
)
/* Without dominance, VA_ARG_BB is reachable without VA_START_BB.  */
58 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
/* Worklist of edges for a backward CFG walk; one slot per block plus 1.  */
61 stack
= XNEWVEC (edge
, n_basic_blocks
+ 1);
/* Bitmap of basic-block indices already visited.  */
64 visited
= sbitmap_alloc (last_basic_block
);
65 sbitmap_zero (visited
);
/* Seed the walk with every predecessor edge of VA_ARG_BB.  */
68 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
/* Abnormal/EH edges make the execution-count relation unprovable.  */
79 if (e
->flags
& EDGE_COMPLEX
)
85 if (src
== va_start_bb
)
88 /* va_arg_bb can be executed more times than va_start_bb. */
/* Dominance guarantees the walk hits VA_START_BB before ENTRY.  */
95 gcc_assert (src
!= ENTRY_BLOCK_PTR
);
97 if (! TEST_BIT (visited
, src
->index
))
99 SET_BIT (visited
, src
->index
);
/* Keep walking backwards through SRC's predecessors.  */
100 FOR_EACH_EDGE (e
, ei
, src
->preds
)
106 sbitmap_free (visited
);
111 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
112 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
113 GPR_P is true if this is GPR counter. */
/* NOTE(review): lossy extraction -- original lines are missing here
   (numbering gaps); fragments kept verbatim, comments added only.  */
115 static unsigned HOST_WIDE_INT
116 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
/* STMT/LHS/ORIG_LHS walk the SSA def chain backwards from RHS;
   RET accumulates the constant bump discovered along the way.  */
119 tree stmt
, lhs
, orig_lhs
;
120 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
121 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offset cache.  */
123 if (si
->offsets
== NULL
)
127 si
->offsets
= XNEWVEC (int, num_ssa_names
)
;
128 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Select GPR or FPR counter state depending on GPR_P.  */
132 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
133 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
134 orig_lhs
= lhs
= rhs
;
/* Cache hit: an offset was already computed for this SSA name.  */
137 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
139 if (counter_val
>= max_size
)
145 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
/* Walk to the defining statement of LHS.  */
149 stmt
= SSA_NAME_DEF_STMT (lhs
);
/* Anything but a plain LHS = ... assignment defeats the analysis.  */
151 if (TREE_CODE (stmt
) != MODIFY_EXPR
152 || TREE_OPERAND (stmt
, 0) != lhs
)
153 return (unsigned HOST_WIDE_INT
) -1;
155 rhs
= TREE_OPERAND (stmt
, 1);
156 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
157 rhs
= TREE_OPERAND (rhs
, 0);
159 if (TREE_CODE (rhs
) == SSA_NAME
)
/* Look through no-op conversions of an SSA name.  */
165 if ((TREE_CODE (rhs
) == NOP_EXPR
166 || TREE_CODE (rhs
) == CONVERT_EXPR
)
167 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
169 lhs
= TREE_OPERAND (rhs
, 0);
/* LHS = X + CST: accumulate the constant and keep walking from X.  */
173 if (TREE_CODE (rhs
) == PLUS_EXPR
174 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
175 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
176 && host_integerp (TREE_OPERAND (rhs
, 1), 1))
178 ret
+= tree_low_cst (TREE_OPERAND (rhs
, 1), 1);
179 lhs
= TREE_OPERAND (rhs
, 0);
/* The walk must terminate at a read of COUNTER itself.  */
183 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
184 return (unsigned HOST_WIDE_INT
) -1;
186 if (TREE_CODE (counter
) == COMPONENT_REF
)
/* Same base object and same FIELD_DECL means the same counter field.  */
188 if (get_base_address (counter
) != get_base_address (rhs
)
189 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
190 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
191 return (unsigned HOST_WIDE_INT
) -1;
193 else if (counter
!= rhs
)
194 return (unsigned HOST_WIDE_INT
) -1;
/* Second pass: record cumulative offsets in si->offsets for each
   SSA name on the chain, saturating at MAX_SIZE.  */
200 val
= ret
+ counter_val
;
203 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
207 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
209 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
211 stmt
= SSA_NAME_DEF_STMT (lhs
);
213 rhs
= TREE_OPERAND (stmt
, 1);
214 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
215 rhs
= TREE_OPERAND (rhs
, 0);
217 if (TREE_CODE (rhs
) == SSA_NAME
)
223 if ((TREE_CODE (rhs
) == NOP_EXPR
224 || TREE_CODE (rhs
) == CONVERT_EXPR
)
225 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
227 lhs
= TREE_OPERAND (rhs
, 0);
/* Undo the constant bump while replaying the chain forward.  */
231 if (TREE_CODE (rhs
) == PLUS_EXPR
232 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
233 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
234 && host_integerp (TREE_OPERAND (rhs
, 1), 1))
236 val
-= tree_low_cst (TREE_OPERAND (rhs
, 1), 1);
237 lhs
= TREE_OPERAND (rhs
, 0);
248 /* Called by walk_tree to look for references to va_list variables. */
/* NOTE(review): lossy extraction -- some original lines missing.  */
251 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* DATA carries the bitmap of interesting va_list DECL_UIDs.  */
254 bitmap va_list_vars
= (bitmap
) data
;
/* Look through SSA names to the underlying declaration.  */
257 if (TREE_CODE (var
) == SSA_NAME
)
258 var
= SSA_NAME_VAR (var
);
/* A reference is any VAR_DECL whose UID is set in the bitmap.  */
260 if (TREE_CODE (var
) == VAR_DECL
261 && bitmap_bit_p (va_list_vars
, DECL_UID (var
)))
268 /* Helper function of va_list_counter_struct_op. Compute
269 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
270 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
271 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): lossy extraction -- fragments kept verbatim below.  */
275 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
278 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not yet decided"; decide it once per bb:
   sizes are computable only when there is a single va_start and the
   current bb runs at most once per execution of the va_start bb.  */
280 if (si
->compute_sizes
< 0)
282 si
->compute_sizes
= 0;
283 if (si
->va_start_count
== 1
284 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
285 si
->compute_sizes
= 1;
287 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
289 "bb%d will %sbe executed at most once for each va_start "
290 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
291 si
->va_start_bb
->index
);
/* The "+ 1 > 1" idiom filters out both 0 and (unsigned)-1 (failure).  */
296 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
/* Bump the appropriate counter, saturating at its maximum.  */
298 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
300 cfun
->va_list_gpr_size
+= increment
;
304 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
306 cfun
->va_list_fpr_size
+= increment
;
/* Unanalyzable write (or sizes not computable): assume the worst and
   require all registers to be saved.  */
311 if (write_p
|| !si
->compute_sizes
)
314 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
316 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
321 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
322 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
323 is false, AP has been seen in VAR = AP assignment.
324 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
325 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): lossy extraction -- fragments kept verbatim below.  */
329 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a COMPONENT_REF of a field (a struct va_list counter).  */
334 if (TREE_CODE (ap
) != COMPONENT_REF
335 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* VAR must be an SSA name that is not itself a tracked va_list.  */
338 if (TREE_CODE (var
) != SSA_NAME
339 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (var
))))
/* The base of the COMPONENT_REF must be a tracked va_list variable.  */
342 base
= get_base_address (ap
);
343 if (TREE_CODE (base
) != VAR_DECL
344 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
)))
/* Dispatch on which backend-declared counter field is accessed.  */
347 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
348 va_list_counter_op (si
, ap
, var
, true, write_p
);
349 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
350 va_list_counter_op (si
, ap
, var
, false, write_p
);
356 /* Check for TEM = AP. Return true if found and the caller shouldn't
357 search for va_list references in the statement. */
/* NOTE(review): lossy extraction -- fragments kept verbatim below.  */
360 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be one of the tracked va_list variables.  */
362 if (TREE_CODE (ap
) != VAR_DECL
363 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM must be a function-local SSA temporary, not itself a va_list.  */
366 if (TREE_CODE (tem
) != SSA_NAME
367 || bitmap_bit_p (si
->va_list_vars
,
368 DECL_UID (SSA_NAME_VAR (tem
)))
369 || is_global_var (SSA_NAME_VAR (tem
)))
/* Lazily decide whether counter sizes are computable in this bb
   (single va_start and bb executed at most once per va_start).  */
372 if (si
->compute_sizes
< 0)
374 si
->compute_sizes
= 0;
375 if (si
->va_start_count
== 1
376 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
377 si
->compute_sizes
= 1;
379 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
381 "bb%d will %sbe executed at most once for each va_start "
382 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
383 si
->va_start_bb
->index
);
386 /* For void * or char * va_list types, there is just one counter.
387 If va_arg is used in a loop, we don't know how many registers need
389 if (! si
->compute_sizes
)
/* (unsigned)-1 from the bump analysis means "could not analyze".  */
392 if (va_list_counter_bump (si
, ap
, tem
, true) == (unsigned HOST_WIDE_INT
) -1)
395 /* Note the temporary, as we need to track whether it doesn't escape
396 the current function. */
397 bitmap_set_bit (si
->va_list_escape_vars
,
398 DECL_UID (SSA_NAME_VAR (tem
)));
407 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* NOTE(review): lossy extraction -- the start of this comment (and
   other lines) are missing; fragments kept verbatim below.  */
410 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
412 unsigned HOST_WIDE_INT increment
;
/* AP must be a tracked va_list variable.  */
414 if (TREE_CODE (ap
) != VAR_DECL
415 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
)))
/* TEM2 must be an SSA temporary that is not itself a va_list.  */
418 if (TREE_CODE (tem2
) != SSA_NAME
419 || bitmap_bit_p (si
->va_list_vars
, DECL_UID (SSA_NAME_VAR (tem2
))))
/* Sizes not computable for this bb: nothing to record here.  */
422 if (si
->compute_sizes
<= 0)
425 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
/* "+ 1 <= 1" catches both 0 and the (unsigned)-1 failure value.  */
426 if (increment
+ 1 <= 1)
/* Bump the GPR counter, saturating at its maximum.  */
429 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
430 cfun
->va_list_gpr_size
+= increment
;
432 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
438 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
439 containing value of some va_list variable plus optionally some constant,
440 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
441 depending whether LHS is a function local temporary. */
/* NOTE(review): lossy extraction -- fragments kept verbatim below.  */
444 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-valued RHS can carry a va_list value here.  */
446 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* Strip one level of "+ CST" or a no-op conversion from RHS.  */
449 if ((TREE_CODE (rhs
) == PLUS_EXPR
450 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
)
451 || TREE_CODE (rhs
) == NOP_EXPR
452 || TREE_CODE (rhs
) == CONVERT_EXPR
)
453 rhs
= TREE_OPERAND (rhs
, 0);
/* RHS must be an SSA name already tracked as holding a va_list value.  */
455 if (TREE_CODE (rhs
) != SSA_NAME
456 || ! bitmap_bit_p (si
->va_list_escape_vars
,
457 DECL_UID (SSA_NAME_VAR (rhs
))))
/* Copying the value into a non-SSA or global LHS means it escapes.  */
460 if (TREE_CODE (lhs
) != SSA_NAME
|| is_global_var (SSA_NAME_VAR (lhs
)))
462 si
->va_list_escapes
= true;
/* Lazily decide whether counter sizes are computable in this bb.  */
466 if (si
->compute_sizes
< 0)
468 si
->compute_sizes
= 0;
469 if (si
->va_start_count
== 1
470 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
471 si
->compute_sizes
= 1;
473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
475 "bb%d will %sbe executed at most once for each va_start "
476 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
477 si
->va_start_bb
->index
);
480 /* For void * or char * va_list types, there is just one counter.
481 If va_arg is used in a loop, we don't know how many registers need
483 if (! si
->compute_sizes
)
485 si
->va_list_escapes
= true;
/* If the bump from the original va_start AP cannot be analyzed,
   treat the va_list as escaping.  */
489 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
490 == (unsigned HOST_WIDE_INT
) -1)
492 si
->va_list_escapes
= true;
/* Otherwise LHS now also carries the va_list value: track it.  */
496 bitmap_set_bit (si
->va_list_escape_vars
,
497 DECL_UID (SSA_NAME_VAR (lhs
)));
501 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
502 Return true if va_list might be escaping. */
/* NOTE(review): lossy extraction -- fragments kept verbatim below.  */
505 check_all_va_list_escapes (struct stdarg_info
*si
)
511 block_stmt_iterator i
;
/* Scan every statement of the block (presumably iterated over every
   bb by a missing outer loop -- TODO confirm against full source).  */
513 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
515 tree stmt
= bsi_stmt (i
), use
;
/* Only statements using a tracked escape temporary are interesting.  */
518 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
520 if (! bitmap_bit_p (si
->va_list_escape_vars
,
521 DECL_UID (SSA_NAME_VAR (use
))))
524 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
526 tree lhs
= TREE_OPERAND (stmt
, 0);
527 tree rhs
= TREE_OPERAND (stmt
, 1);
529 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
530 rhs
= TREE_OPERAND (rhs
, 0);
/* A dereference *use with a known access size and a known counter
   offset bounds how many GPR units this read needs.  */
533 if (TREE_CODE (rhs
) == INDIRECT_REF
534 && TREE_OPERAND (rhs
, 0) == use
535 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
536 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)), 1)
537 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
539 unsigned HOST_WIDE_INT gpr_size
;
540 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
/* Units needed = cached offset of USE plus the size of the access.  */
542 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
543 + tree_low_cst (access_size
, 1);
/* Raise cfun->va_list_gpr_size, saturating at the maximum.  */
544 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
545 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
546 else if (gpr_size
> cfun
->va_list_gpr_size
)
547 cfun
->va_list_gpr_size
= gpr_size
;
551 /* va_arg sequences may contain
552 other_ap_temp = ap_temp;
553 other_ap_temp = ap_temp + constant;
554 other_ap_temp = (some_type *) ap_temp;
/* Strip "+ CST" / no-op conversion to find the copied temporary.  */
557 if ((TREE_CODE (rhs
) == PLUS_EXPR
558 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == INTEGER_CST
)
559 || TREE_CODE (rhs
) == NOP_EXPR
560 || TREE_CODE (rhs
) == CONVERT_EXPR
)
561 rhs
= TREE_OPERAND (rhs
, 0);
/* Copy into another tracked escape temporary: harmless.  */
565 if (TREE_CODE (lhs
) == SSA_NAME
566 && bitmap_bit_p (si
->va_list_escape_vars
,
567 DECL_UID (SSA_NAME_VAR (lhs
))))
/* Storing back into a tracked va_list variable is also recognized.  */
570 if (TREE_CODE (lhs
) == VAR_DECL
571 && bitmap_bit_p (si
->va_list_vars
,
/* Anything else is a potential escape: dump and report it.  */
577 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
579 fputs ("va_list escapes in ", dump_file
);
580 print_generic_expr (dump_file
, stmt
, dump_flags
);
581 fputc ('\n', dump_file
);
592 /* Return true if this optimization pass should be done.
593 It makes only sense for stdarg functions. */
596 gate_optimize_stdarg (void)
598 /* This optimization is only for stdarg functions. */
599 return current_function_stdarg
!= 0;
603 /* Entry point to the stdarg optimization pass. */
/* NOTE(review): lossy extraction -- many original lines are missing
   (numbering gaps) and statements are split; fragments kept verbatim
   with comments added only.  */
606 execute_optimize_stdarg (void)
609 bool va_list_escapes
= false;
610 bool va_list_simple_ptr
;
611 struct stdarg_info si
;
612 const char *funcname
= NULL
;
/* Start from the optimistic assumption: no registers need saving.  */
614 cfun
->va_list_gpr_size
= 0;
615 cfun
->va_list_fpr_size
= 0;
616 memset (&si
, 0, sizeof (si
));
617 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
618 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
621 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* "Simple pointer" va_list: plain void* / char*, a single counter.  */
623 va_list_simple_ptr
= POINTER_TYPE_P (va_list_type_node
)
624 && (TREE_TYPE (va_list_type_node
) == void_type_node
625 || TREE_TYPE (va_list_type_node
) == char_type_node
);
626 gcc_assert (is_gimple_reg_type (va_list_type_node
) == va_list_simple_ptr
);
/* First walk: find all va_start calls and collect their va_list
   variables into si.va_list_vars.  */
630 block_stmt_iterator i
;
632 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
634 tree stmt
= bsi_stmt (i
);
635 tree call
= get_call_expr_in (stmt
), callee
;
641 callee
= get_callee_fndecl (call
);
643 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
646 switch (DECL_FUNCTION_CODE (callee
))
648 case BUILT_IN_VA_START
:
650 /* If old style builtins are used, don't optimize anything. */
651 case BUILT_IN_SAVEREGS
:
652 case BUILT_IN_STDARG_START
:
653 case BUILT_IN_ARGS_INFO
:
654 case BUILT_IN_NEXT_ARG
:
655 va_list_escapes
= true;
/* Extract the va_list variable from the va_start argument, which
   must be &ap or &ap[0] of a local va_list-typed VAR_DECL.  */
662 ap
= TREE_VALUE (TREE_OPERAND (call
, 1));
664 if (TREE_CODE (ap
) != ADDR_EXPR
)
666 va_list_escapes
= true;
669 ap
= TREE_OPERAND (ap
, 0);
670 if (TREE_CODE (ap
) == ARRAY_REF
)
672 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
674 va_list_escapes
= true;
677 ap
= TREE_OPERAND (ap
, 0);
679 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
680 != TYPE_MAIN_VARIANT (va_list_type_node
)
681 || TREE_CODE (ap
) != VAR_DECL
)
683 va_list_escapes
= true;
687 if (is_global_var (ap
))
689 va_list_escapes
= true;
693 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
));
695 /* VA_START_BB and VA_START_AP will be only used if there is just
696 one va_start in the function. */
705 /* If there were no va_start uses in the function, there is no need to
707 if (si
.va_start_count
== 0)
710 /* If some va_list arguments weren't local, we can't optimize. */
714 /* For void * or char * va_list, something useful can be done only
715 if there is just one va_start. */
716 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
718 va_list_escapes
= true;
722 /* For struct * va_list, if the backend didn't tell us what the counter fields
723 are, there is nothing more we can do. */
724 if (!va_list_simple_ptr
725 && va_list_gpr_counter_field
== NULL_TREE
726 && va_list_fpr_counter_field
== NULL_TREE
)
728 va_list_escapes
= true;
732 /* For void * or char * va_list there is just one counter
733 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
734 if (va_list_simple_ptr
)
735 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Dominators are needed by reachable_at_most_once.  */
737 calculate_dominance_info (CDI_DOMINATORS
);
/* Second walk: classify every statement (and, for simple-pointer
   va_list, every PHI node) as a recognized va_arg operation or as a
   potential escape of the va_list.  */
741 block_stmt_iterator i
;
743 si
.compute_sizes
= -1;
746 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
747 them as assignments for the purpose of escape analysis. This is
748 not needed for non-simple va_list because virtual phis don't perform
749 any real data movement. */
750 if (va_list_simple_ptr
)
756 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
758 lhs
= PHI_RESULT (phi
);
760 if (!is_gimple_reg (lhs
))
763 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
765 rhs
= USE_FROM_PTR (uop
);
/* Treat each PHI arg as an assignment lhs = rhs.  */
766 if (va_list_ptr_read (&si
, rhs
, lhs
))
768 else if (va_list_ptr_write (&si
, lhs
, rhs
))
771 check_va_list_escapes (&si
, lhs
, rhs
);
/* Any other reference to a va_list variable in the PHI escapes.  */
773 if (si
.va_list_escapes
774 || walk_tree (&phi
, find_va_list_reference
,
775 si
.va_list_vars
, NULL
))
777 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
779 fputs ("va_list escapes in ", dump_file
);
780 print_generic_expr (dump_file
, phi
, dump_flags
);
781 fputc ('\n', dump_file
);
783 va_list_escapes
= true;
789 for (i
= bsi_start (bb
);
790 !bsi_end_p (i
) && !va_list_escapes
;
793 tree stmt
= bsi_stmt (i
);
796 /* Don't look at __builtin_va_{start,end}, they are ok. */
797 call
= get_call_expr_in (stmt
);
800 tree callee
= get_callee_fndecl (call
);
803 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
804 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
805 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
809 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
811 tree lhs
= TREE_OPERAND (stmt
, 0);
812 tree rhs
= TREE_OPERAND (stmt
, 1);
814 if (TREE_CODE (rhs
) == WITH_SIZE_EXPR
)
815 rhs
= TREE_OPERAND (rhs
, 0);
817 if (va_list_simple_ptr
)
819 /* Check for tem = ap. */
820 if (va_list_ptr_read (&si
, rhs
, lhs
))
823 /* Check for the last insn in:
828 else if (va_list_ptr_write (&si
, lhs
, rhs
))
832 check_va_list_escapes (&si
, lhs
, rhs
);
/* Struct va_list: recognize counter-field reads/writes instead.  */
836 /* Check for ap[0].field = temp. */
837 if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
840 /* Check for temp = ap[0].field. */
841 else if (va_list_counter_struct_op (&si
, rhs
, lhs
, false))
844 /* Do any architecture specific checking. */
845 else if (targetm
.stdarg_optimize_hook
846 && targetm
.stdarg_optimize_hook (&si
, lhs
, rhs
))
851 /* All other uses of va_list are either va_copy (that is not handled
852 in this optimization), taking address of va_list variable or
853 passing va_list to other functions (in that case va_list might
854 escape the function and therefore va_start needs to set it up
855 fully), or some unexpected use of va_list. None of these should
856 happen in a gimplified VA_ARG_EXPR. */
857 if (si
.va_list_escapes
858 || walk_tree (&stmt
, find_va_list_reference
,
859 si
.va_list_vars
, NULL
))
861 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
863 fputs ("va_list escapes in ", dump_file
);
864 print_generic_expr (dump_file
, stmt
, dump_flags
);
865 fputc ('\n', dump_file
);
867 va_list_escapes
= true;
/* Final escape check over the recorded temporaries.  */
875 if (! va_list_escapes
876 && va_list_simple_ptr
877 && ! bitmap_empty_p (si
.va_list_escape_vars
)
878 && check_all_va_list_escapes (&si
))
879 va_list_escapes
= true;
/* On any escape, fall back to saving all registers.  */
884 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
885 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
887 BITMAP_FREE (si
.va_list_vars
);
888 BITMAP_FREE (si
.va_list_escape_vars
);
/* Dump the final per-function decision.  */
892 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
893 funcname
, (int) va_list_escapes
);
894 if (cfun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
895 fputs ("all", dump_file
);
897 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
898 fputs (" GPR units and ", dump_file
);
899 if (cfun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
900 fputs ("all", dump_file
);
902 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
903 fputs (" FPR units.\n", dump_file
);
/* Pass descriptor registering the stdarg optimization with the pass
   manager.  NOTE(review): lossy extraction -- several initializer
   fields (name, tv_id, etc.) are missing from this view.  */
909 struct tree_opt_pass pass_stdarg
=
912 gate_optimize_stdarg
, /* gate */
913 execute_optimize_stdarg
, /* execute */
916 0, /* static_pass_number */
918 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
919 0, /* properties_provided */
920 0, /* properties_destroyed */
921 0, /* todo_flags_start */
922 TODO_dump_func
, /* todo_flags_finish */