1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "langhooks.h"
28 #include "gimple-pretty-print.h"
32 #include "gimple-iterator.h"
33 #include "gimple-walk.h"
34 #include "gimple-ssa.h"
35 #include "tree-phinodes.h"
36 #include "ssa-iterators.h"
37 #include "tree-ssanames.h"
39 #include "tree-pass.h"
40 #include "tree-stdarg.h"
42 /* A simple pass that attempts to optimize stdarg functions on architectures
43 that need to save register arguments to stack on entry to stdarg functions.
44 If the function doesn't use any va_start macros, no registers need to
45 be saved. If va_start macros are used, the va_list variables don't escape
46 the function, it is only necessary to save registers that will be used
47 in va_arg macros. E.g. if va_arg is only used with integral types
48 in the function, floating point registers don't need to be saved, etc. */
51 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
52 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this listing is garbled -- statements are split across
   lines and several original lines (braces, declarations, returns) are
   missing.  Code left byte-identical; only comments added.  */
55 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Worklist of edges still to examine while walking predecessors.  */
57 vec
<edge
> stack
= vNULL
;
/* Trivially true when both are the same block.  */
63 if (va_arg_bb
== va_start_bb
)
/* Dominance is a necessary condition for the "at most once" claim.  */
66 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
69 visited
= sbitmap_alloc (last_basic_block
);
70 bitmap_clear (visited
);
/* Seed the worklist with all predecessor edges of VA_ARG_BB.  */
73 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
/* Depth-first walk back toward VA_START_BB.  */
76 while (! stack
.is_empty ())
/* Abnormal (EDGE_COMPLEX) edges make the execution count unknowable.  */
83 if (e
->flags
& EDGE_COMPLEX
)
89 if (src
== va_start_bb
)
92 /* va_arg_bb can be executed more times than va_start_bb. */
/* The walk must stay below VA_START_BB; reaching the entry block
   would contradict the dominance test above.  */
99 gcc_assert (src
!= ENTRY_BLOCK_PTR
);
101 if (! bitmap_bit_p (visited
, src
->index
))
103 bitmap_set_bit (visited
, src
->index
);
104 FOR_EACH_EDGE (e
, ei
, src
->preds
)
110 sbitmap_free (visited
);
115 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
116 return constant, otherwise return HOST_WIDE_INT_M1U.
117 GPR_P is true if this is GPR counter. */
/* NOTE(review): garbled listing -- statement fragments only; many
   original lines are missing.  Code left byte-identical.  */
119 static unsigned HOST_WIDE_INT
120 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
125 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
126 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offset cache, one slot per
   SSA name version.  */
128 if (si
->offsets
== NULL
)
132 si
->offsets
= XNEWVEC (int, num_ssa_names
);
133 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Pick the running counter value and its cap for the requested
   register class (GPR vs FPR).  */
137 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
138 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
139 orig_lhs
= lhs
= rhs
;
/* First pass: walk the SSA def chain backwards from RHS, accumulating
   constant increments into RET, until COUNTER itself is reached or the
   chain cannot be followed.  */
142 enum tree_code rhs_code
;
/* A cached offset (!= -1) lets us short-circuit the walk.  */
145 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
147 if (counter_val
>= max_size
)
153 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
157 stmt
= SSA_NAME_DEF_STMT (lhs
);
159 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
160 return HOST_WIDE_INT_M1U
;
162 rhs_code
= gimple_assign_rhs_code (stmt
);
163 rhs1
= gimple_assign_rhs1 (stmt
);
/* Plain copies and casts of an SSA name are followed transparently.  */
164 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
165 || gimple_assign_cast_p (stmt
))
166 && TREE_CODE (rhs1
) == SSA_NAME
)
/* lhs = name + constant: accumulate the constant increment.  */
172 if ((rhs_code
== POINTER_PLUS_EXPR
173 || rhs_code
== PLUS_EXPR
)
174 && TREE_CODE (rhs1
) == SSA_NAME
175 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
177 ret
+= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
/* lhs = &MEM[name + constant]: same, through an ADDR_EXPR form.  */
182 if (rhs_code
== ADDR_EXPR
183 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
184 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
185 && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1), 1))
187 ret
+= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1), 1);
188 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
/* Anything other than a single-rhs assignment ends the walk.  */
192 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
193 return HOST_WIDE_INT_M1U
;
195 rhs
= gimple_assign_rhs1 (stmt
);
/* The chain must bottom out at COUNTER itself (same tree shape).  */
196 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
197 return HOST_WIDE_INT_M1U
;
199 if (TREE_CODE (counter
) == COMPONENT_REF
)
/* For COMPONENT_REF counters, base object and field must match.  */
201 if (get_base_address (counter
) != get_base_address (rhs
)
202 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
203 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
204 return HOST_WIDE_INT_M1U
;
206 else if (counter
!= rhs
)
207 return HOST_WIDE_INT_M1U
;
/* Second pass: record the absolute counter value for every SSA name on
   the chain in si->offsets, capping at MAX_SIZE.  */
213 val
= ret
+ counter_val
;
216 enum tree_code rhs_code
;
219 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
223 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
225 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
227 stmt
= SSA_NAME_DEF_STMT (lhs
);
229 rhs_code
= gimple_assign_rhs_code (stmt
);
230 rhs1
= gimple_assign_rhs1 (stmt
);
/* Mirror of the first pass: follow copies/casts...  */
231 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
232 || gimple_assign_cast_p (stmt
))
233 && TREE_CODE (rhs1
) == SSA_NAME
)
/* ...and subtract the increments consumed at each step.  */
239 if ((rhs_code
== POINTER_PLUS_EXPR
240 || rhs_code
== PLUS_EXPR
)
241 && TREE_CODE (rhs1
) == SSA_NAME
242 && host_integerp (gimple_assign_rhs2 (stmt
), 1))
244 val
-= tree_low_cst (gimple_assign_rhs2 (stmt
), 1);
249 if (rhs_code
== ADDR_EXPR
250 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
251 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
252 && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1), 1))
254 val
-= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1), 1);
255 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
266 /* Called by walk_tree to look for references to va_list variables. */
/* NOTE(review): garbled listing -- fragments only; code left
   byte-identical, comments added.  */
269 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* The va_list bitmap is smuggled in through walk_stmt_info::info.  */
272 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
/* SSA names are keyed in the bitmap by their version number...  */
275 if (TREE_CODE (var
) == SSA_NAME
)
277 if (bitmap_bit_p (va_list_vars
, SSA_NAME_VERSION (var
)))
/* ...while VAR_DECLs are keyed at DECL_UID + num_ssa_names so both
   kinds can share a single bitmap without colliding.  */
280 else if (TREE_CODE (var
) == VAR_DECL
)
282 if (bitmap_bit_p (va_list_vars
, DECL_UID (var
) + num_ssa_names
))
290 /* Helper function of va_list_counter_struct_op. Compute
291 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
292 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
293 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): garbled listing -- fragments only; code left
   byte-identical, comments added.  */
297 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
300 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not decided yet": decide it now based on
   whether this block runs at most once per va_start.  */
302 if (si
->compute_sizes
< 0)
304 si
->compute_sizes
= 0;
305 if (si
->va_start_count
== 1
306 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
307 si
->compute_sizes
= 1;
309 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
311 "bb%d will %sbe executed at most once for each va_start "
312 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
313 si
->va_start_bb
->index
);
/* The "+ 1 > 1" idiom filters out both 0 and HOST_WIDE_INT_M1U
   (the failure value of va_list_counter_bump).  */
318 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
/* Bump the appropriate per-function register-save estimate,
   saturating at the target maximum.  */
320 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
322 cfun
->va_list_gpr_size
+= increment
;
326 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
328 cfun
->va_list_fpr_size
+= increment
;
/* If we cannot reason precisely, fall back to "save everything".  */
333 if (write_p
|| !si
->compute_sizes
)
336 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
338 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
343 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
344 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
345 is false, AP has been seen in VAR = AP assignment.
346 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
347 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): garbled listing -- fragments only; code left
   byte-identical, comments added.  */
351 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a field access (ap.field) to be a counter reference.  */
356 if (TREE_CODE (ap
) != COMPONENT_REF
357 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* VAR must be a local SSA temporary, not a tracked va_list itself.  */
360 if (TREE_CODE (var
) != SSA_NAME
361 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (var
)))
/* The base object of the field access must be a tracked va_list
   VAR_DECL (keyed at DECL_UID + num_ssa_names in the bitmap).  */
364 base
= get_base_address (ap
);
365 if (TREE_CODE (base
) != VAR_DECL
366 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
) + num_ssa_names
))
/* Dispatch on which backend-declared counter field is accessed.  */
369 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
370 va_list_counter_op (si
, ap
, var
, true, write_p
);
371 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
372 va_list_counter_op (si
, ap
, var
, false, write_p
);
378 /* Check for TEM = AP. Return true if found and the caller shouldn't
379 search for va_list references in the statement. */
/* NOTE(review): garbled listing -- fragments only; code left
   byte-identical, comments added.  */
382 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be a tracked va_list VAR_DECL.  */
384 if (TREE_CODE (ap
) != VAR_DECL
385 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* TEM must be a plain SSA temporary, not a va_list itself.  */
388 if (TREE_CODE (tem
) != SSA_NAME
389 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem
)))
/* Lazily decide whether precise size computation is possible here
   (see the identical pattern in va_list_counter_op).  */
392 if (si
->compute_sizes
< 0)
394 si
->compute_sizes
= 0;
395 if (si
->va_start_count
== 1
396 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
397 si
->compute_sizes
= 1;
399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
401 "bb%d will %sbe executed at most once for each va_start "
402 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
403 si
->va_start_bb
->index
);
406 /* For void * or char * va_list types, there is just one counter.
407 If va_arg is used in a loop, we don't know how many registers need
409 if (! si
->compute_sizes
)
/* HOST_WIDE_INT_M1U from va_list_counter_bump means the pointer
   chain could not be followed.  */
412 if (va_list_counter_bump (si
, ap
, tem
, true) == HOST_WIDE_INT_M1U
)
415 /* Note the temporary, as we need to track whether it doesn't escape
416 the current function. */
417 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (tem
));
427 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* NOTE(review): garbled listing -- fragments only (the first lines of
   this function's block comment are missing).  Code left byte-identical,
   comments added.  */
430 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
432 unsigned HOST_WIDE_INT increment
;
/* AP must be a tracked va_list VAR_DECL.  */
434 if (TREE_CODE (ap
) != VAR_DECL
435 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* TEM2 must be a plain SSA temporary, not a va_list itself.  */
438 if (TREE_CODE (tem2
) != SSA_NAME
439 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem2
)))
/* Without precise size computation there is nothing to refine.  */
442 if (si
->compute_sizes
<= 0)
445 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
/* "+ 1 <= 1" catches both 0 and the HOST_WIDE_INT_M1U failure value.  */
446 if (increment
+ 1 <= 1)
/* Bump the GPR save estimate, saturating at the target maximum.  */
449 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
450 cfun
->va_list_gpr_size
+= increment
;
452 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
458 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
459 containing value of some va_list variable plus optionally some constant,
460 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
461 depending whether LHS is a function local temporary. */
/* NOTE(review): garbled listing -- fragments only; code left
   byte-identical, comments added.  */
464 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-typed values can carry a va_list pointer.  */
466 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* RHS must be (directly or via &MEM[...]) a tracked escape temp.  */
469 if (TREE_CODE (rhs
) == SSA_NAME
)
471 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (rhs
)))
474 else if (TREE_CODE (rhs
) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
476 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)) == SSA_NAME
)
478 tree ptr
= TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0);
479 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (ptr
)))
/* Storing an escape temp into a non-SSA lhs means it truly escapes.  */
485 if (TREE_CODE (lhs
) != SSA_NAME
)
487 si
->va_list_escapes
= true;
/* Lazily decide whether precise size computation is possible here
   (same pattern as va_list_counter_op / va_list_ptr_read).  */
491 if (si
->compute_sizes
< 0)
493 si
->compute_sizes
= 0;
494 if (si
->va_start_count
== 1
495 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
496 si
->compute_sizes
= 1;
498 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
500 "bb%d will %sbe executed at most once for each va_start "
501 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
502 si
->va_start_bb
->index
);
505 /* For void * or char * va_list types, there is just one counter.
506 If va_arg is used in a loop, we don't know how many registers need
508 if (! si
->compute_sizes
)
510 si
->va_list_escapes
= true;
/* If the pointer chain back to the va_start ap cannot be followed,
   treat it as a full escape.  */
514 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
515 == HOST_WIDE_INT_M1U
)
517 si
->va_list_escapes
= true;
/* Otherwise LHS is another local temp carrying the va_list value.  */
521 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (lhs
));
525 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
526 Return true if va_list might be escaping. */
/* NOTE(review): garbled listing -- fragments only; many original lines
   are missing.  Code left byte-identical, comments added.  */
529 check_all_va_list_escapes (struct stdarg_info
*si
)
535 gimple_stmt_iterator i
;
/* First scan PHI nodes: a PHI mixing an escape temp into a
   non-escape-temp result counts as an escape.  */
537 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
542 gimple phi
= gsi_stmt (i
);
544 lhs
= PHI_RESULT (phi
);
545 if (virtual_operand_p (lhs
)
546 || bitmap_bit_p (si
->va_list_escape_vars
,
547 SSA_NAME_VERSION (lhs
)))
550 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
552 tree rhs
= USE_FROM_PTR (uop
);
553 if (TREE_CODE (rhs
) == SSA_NAME
554 && bitmap_bit_p (si
->va_list_escape_vars
,
555 SSA_NAME_VERSION (rhs
)))
557 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
559 fputs ("va_list escapes in ", dump_file
);
560 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
561 fputc ('\n', dump_file
);
/* Then scan every ordinary statement that uses an escape temp.  */
568 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
570 gimple stmt
= gsi_stmt (i
);
/* Debug stmts never cause real escapes.  */
574 if (is_gimple_debug (stmt
))
577 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
579 if (! bitmap_bit_p (si
->va_list_escape_vars
,
580 SSA_NAME_VERSION (use
)))
583 if (is_gimple_assign (stmt
))
585 tree rhs
= gimple_assign_rhs1 (stmt
);
586 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* A load MEM[use + off] with a known offset cached in si->offsets
   reveals how far into the save area the va_arg read reaches;
   widen va_list_gpr_size accordingly.  */
589 if (rhs_code
== MEM_REF
590 && TREE_OPERAND (rhs
, 0) == use
591 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
592 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)), 1)
593 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
595 unsigned HOST_WIDE_INT gpr_size
;
596 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
598 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
599 + tree_low_cst (TREE_OPERAND (rhs
, 1), 0)
600 + tree_low_cst (access_size
, 1);
601 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
602 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
603 else if (gpr_size
> cfun
->va_list_gpr_size
)
604 cfun
->va_list_gpr_size
= gpr_size
;
608 /* va_arg sequences may contain
609 other_ap_temp = ap_temp;
610 other_ap_temp = ap_temp + constant;
611 other_ap_temp = (some_type *) ap_temp;
615 && ((rhs_code
== POINTER_PLUS_EXPR
616 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
618 || gimple_assign_cast_p (stmt
)
619 || (get_gimple_rhs_class (rhs_code
)
620 == GIMPLE_SINGLE_RHS
)))
622 tree lhs
= gimple_assign_lhs (stmt
);
/* Copies between tracked temps / tracked va_list vars are fine.  */
624 if (TREE_CODE (lhs
) == SSA_NAME
625 && bitmap_bit_p (si
->va_list_escape_vars
,
626 SSA_NAME_VERSION (lhs
)))
629 if (TREE_CODE (lhs
) == VAR_DECL
630 && bitmap_bit_p (si
->va_list_vars
,
631 DECL_UID (lhs
) + num_ssa_names
))
/* &MEM[use] stored into another tracked escape temp is also fine.  */
634 else if (rhs_code
== ADDR_EXPR
635 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
636 && TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0) == use
)
638 tree lhs
= gimple_assign_lhs (stmt
);
640 if (bitmap_bit_p (si
->va_list_escape_vars
,
641 SSA_NAME_VERSION (lhs
)))
/* Any other use of an escape temp is a real escape.  */
646 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
648 fputs ("va_list escapes in ", dump_file
);
649 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
650 fputc ('\n', dump_file
);
661 /* Return true if this optimization pass should be done.
662 It makes only sense for stdarg functions. */
/* NOTE(review): garbled listing -- the return type line and braces are
   missing from this extraction.  Code left byte-identical.  */
665 gate_optimize_stdarg (void)
667 /* This optimization is only for stdarg functions. */
668 return cfun
->stdarg
!= 0;
672 /* Entry point to the stdarg optimization pass. */
/* NOTE(review): garbled listing -- statements are split across lines and
   many original lines (braces, loop headers, returns) are missing.  Code
   left byte-identical; only comments added.  */
675 execute_optimize_stdarg (void)
678 bool va_list_escapes
= false;
679 bool va_list_simple_ptr
;
680 struct stdarg_info si
;
681 struct walk_stmt_info wi
;
682 const char *funcname
= NULL
;
/* Start from the pessimistic-zero estimates and fresh tracking state.  */
685 cfun
->va_list_gpr_size
= 0;
686 cfun
->va_list_fpr_size
= 0;
687 memset (&si
, 0, sizeof (si
));
688 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
689 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
692 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* "Simple" va_list is a bare void*/char* pointer (per-target ABI);
   the structured case is handled through counter fields instead.  */
694 cfun_va_list
= targetm
.fn_abi_va_list (cfun
->decl
);
695 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
696 && (TREE_TYPE (cfun_va_list
) == void_type_node
697 || TREE_TYPE (cfun_va_list
) == char_type_node
);
698 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
/* Phase 1: find every va_start call and collect its ap variable.  */
702 gimple_stmt_iterator i
;
704 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
706 gimple stmt
= gsi_stmt (i
);
709 if (!is_gimple_call (stmt
))
712 callee
= gimple_call_fndecl (stmt
);
714 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
717 switch (DECL_FUNCTION_CODE (callee
))
719 case BUILT_IN_VA_START
:
721 /* If old style builtins are used, don't optimize anything. */
722 case BUILT_IN_SAVEREGS
:
723 case BUILT_IN_NEXT_ARG
:
724 va_list_escapes
= true;
/* Peel the &ap (and possible ap[0]) wrapper to get the VAR_DECL.  */
731 ap
= gimple_call_arg (stmt
, 0);
733 if (TREE_CODE (ap
) != ADDR_EXPR
)
735 va_list_escapes
= true;
738 ap
= TREE_OPERAND (ap
, 0);
739 if (TREE_CODE (ap
) == ARRAY_REF
)
741 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
743 va_list_escapes
= true;
746 ap
= TREE_OPERAND (ap
, 0);
/* The argument must be a local variable of the ABI va_list type.  */
748 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
749 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (cfun
->decl
))
750 || TREE_CODE (ap
) != VAR_DECL
)
752 va_list_escapes
= true;
756 if (is_global_var (ap
))
758 va_list_escapes
= true;
/* Track the variable; VAR_DECLs are keyed at DECL_UID + num_ssa_names.  */
762 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
) + num_ssa_names
);
764 /* VA_START_BB and VA_START_AP will be only used if there is just
765 one va_start in the function. */
774 /* If there were no va_start uses in the function, there is no need to
776 if (si
.va_start_count
== 0)
779 /* If some va_list arguments weren't local, we can't optimize. */
783 /* For void * or char * va_list, something useful can be done only
784 if there is just one va_start. */
785 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
787 va_list_escapes
= true;
791 /* For struct * va_list, if the backend didn't tell us what the counter fields
792 are, there is nothing more we can do. */
793 if (!va_list_simple_ptr
794 && va_list_gpr_counter_field
== NULL_TREE
795 && va_list_fpr_counter_field
== NULL_TREE
)
797 va_list_escapes
= true;
801 /* For void * or char * va_list there is just one counter
802 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
803 if (va_list_simple_ptr
)
804 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Phase 2: walk every statement, classifying uses of the va_list.  */
806 calculate_dominance_info (CDI_DOMINATORS
);
807 memset (&wi
, 0, sizeof (wi
));
808 wi
.info
= si
.va_list_vars
;
812 gimple_stmt_iterator i
;
/* Reset the lazy "can we compute sizes here" flag per block.  */
814 si
.compute_sizes
= -1;
817 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
818 them as assignments for the purpose of escape analysis. This is
819 not needed for non-simple va_list because virtual phis don't perform
820 any real data movement. */
821 if (va_list_simple_ptr
)
827 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
829 gimple phi
= gsi_stmt (i
);
830 lhs
= PHI_RESULT (phi
);
832 if (virtual_operand_p (lhs
))
835 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
837 rhs
= USE_FROM_PTR (uop
);
838 if (va_list_ptr_read (&si
, rhs
, lhs
))
840 else if (va_list_ptr_write (&si
, lhs
, rhs
))
843 check_va_list_escapes (&si
, lhs
, rhs
);
845 if (si
.va_list_escapes
)
847 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
849 fputs ("va_list escapes in ", dump_file
);
850 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
851 fputc ('\n', dump_file
);
853 va_list_escapes
= true;
859 for (i
= gsi_start_bb (bb
);
860 !gsi_end_p (i
) && !va_list_escapes
;
863 gimple stmt
= gsi_stmt (i
);
865 /* Don't look at __builtin_va_{start,end}, they are ok. */
866 if (is_gimple_call (stmt
))
868 tree callee
= gimple_call_fndecl (stmt
);
871 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
872 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
873 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
877 if (is_gimple_assign (stmt
))
879 tree lhs
= gimple_assign_lhs (stmt
);
880 tree rhs
= gimple_assign_rhs1 (stmt
);
882 if (va_list_simple_ptr
)
884 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
885 == GIMPLE_SINGLE_RHS
)
887 /* Check for ap ={v} {}. */
888 if (TREE_CLOBBER_P (rhs
))
891 /* Check for tem = ap. */
892 else if (va_list_ptr_read (&si
, rhs
, lhs
))
895 /* Check for the last insn in:
900 else if (va_list_ptr_write (&si
, lhs
, rhs
))
904 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
905 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
906 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
907 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
908 == GIMPLE_SINGLE_RHS
))
909 check_va_list_escapes (&si
, lhs
, rhs
);
913 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
914 == GIMPLE_SINGLE_RHS
)
916 /* Check for ap ={v} {}. */
917 if (TREE_CLOBBER_P (rhs
))
920 /* Check for ap[0].field = temp. */
921 else if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
924 /* Check for temp = ap[0].field. */
925 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
930 /* Do any architecture specific checking. */
931 if (targetm
.stdarg_optimize_hook
932 && targetm
.stdarg_optimize_hook (&si
, stmt
))
936 else if (is_gimple_debug (stmt
))
939 /* All other uses of va_list are either va_copy (that is not handled
940 in this optimization), taking address of va_list variable or
941 passing va_list to other functions (in that case va_list might
942 escape the function and therefore va_start needs to set it up
943 fully), or some unexpected use of va_list. None of these should
944 happen in a gimplified VA_ARG_EXPR. */
945 if (si
.va_list_escapes
946 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
948 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
950 fputs ("va_list escapes in ", dump_file
);
951 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
952 fputc ('\n', dump_file
);
954 va_list_escapes
= true;
/* Phase 3: verify that none of the tracked temporaries escape.  */
962 if (! va_list_escapes
963 && va_list_simple_ptr
964 && ! bitmap_empty_p (si
.va_list_escape_vars
)
965 && check_all_va_list_escapes (&si
))
966 va_list_escapes
= true;
/* On any escape, fall back to saving all registers.  */
971 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
972 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
974 BITMAP_FREE (si
.va_list_vars
);
975 BITMAP_FREE (si
.va_list_escape_vars
);
/* Dump the final per-function estimates.  */
979 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
980 funcname
, (int) va_list_escapes
);
981 if (cfun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
982 fputs ("all", dump_file
);
984 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
985 fputs (" GPR units and ", dump_file
);
986 if (cfun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
987 fputs ("all", dump_file
);
989 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
990 fputs (" FPR units.\n", dump_file
);
/* Static pass descriptor consumed by the GCC pass manager; requires a
   CFG and SSA form, runs under no dedicated timevar.  */
998 const pass_data pass_data_stdarg
=
1000 GIMPLE_PASS
, /* type */
1001 "stdarg", /* name */
1002 OPTGROUP_NONE
, /* optinfo_flags */
1003 true, /* has_gate */
1004 true, /* has_execute */
1005 TV_NONE
, /* tv_id */
1006 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1007 0, /* properties_provided */
1008 0, /* properties_destroyed */
1009 0, /* todo_flags_start */
1010 0, /* todo_flags_finish */
/* Thin C++ pass wrapper: delegates gate/execute to the static
   gate_optimize_stdarg / execute_optimize_stdarg functions above.
   NOTE(review): garbled listing -- access specifiers and braces are
   missing from this extraction; code left byte-identical.  */
1013 class pass_stdarg
: public gimple_opt_pass
1016 pass_stdarg (gcc::context
*ctxt
)
1017 : gimple_opt_pass (pass_data_stdarg
, ctxt
)
1020 /* opt_pass methods: */
1021 bool gate () { return gate_optimize_stdarg (); }
1022 unsigned int execute () { return execute_optimize_stdarg (); }
1024 }; // class pass_stdarg
/* Factory used by the pass manager to instantiate the stdarg pass.
   Ownership of the returned object passes to the caller.  */
1029 make_pass_stdarg (gcc::context
*ctxt
)
1031 return new pass_stdarg (ctxt
);