1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "langhooks.h"
28 #include "gimple-pretty-print.h"
31 #include "basic-block.h"
32 #include "tree-ssa-alias.h"
33 #include "internal-fn.h"
34 #include "gimple-expr.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimple-ssa.h"
40 #include "tree-phinodes.h"
41 #include "ssa-iterators.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
45 #include "tree-pass.h"
46 #include "tree-stdarg.h"
48 /* A simple pass that attempts to optimize stdarg functions on architectures
49 that need to save register arguments to stack on entry to stdarg functions.
50 If the function doesn't use any va_start macros, no registers need to
51 be saved. If va_start macros are used, the va_list variables don't escape
52 the function, it is only necessary to save registers that will be used
53 in va_arg macros. E.g. if va_arg is only used with integral types
54 in the function, floating point registers don't need to be saved, etc. */
57 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
58 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this extract is elided -- braces, returns and some statements
   are missing from this view.  Comments only; no code changes.  */
61 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Worklist of predecessor edges for a backward CFG walk.  */
63 vec
<edge
> stack
= vNULL
;
/* Same block: trivially executed the same number of times.  */
69 if (va_arg_bb
== va_start_bb
)
/* Without dominance, va_arg_bb could execute on a path that never
   went through va_start_bb at all.  */
72 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
/* Visited set indexed by basic-block index, sized to the function's CFG.  */
75 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
76 bitmap_clear (visited
);
/* Seed the worklist with every predecessor edge of va_arg_bb.  */
79 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
/* Walk backwards until the worklist drains (or an elided early exit).  */
82 while (! stack
.is_empty ())
/* Abnormal/EH edges make the "at most once" argument unsound.  */
89 if (e
->flags
& EDGE_COMPLEX
)
/* Presumably the walk stops once a path reaches va_start_bb -- the
   terminating action itself is elided here; TODO confirm.  */
95 if (src
== va_start_bb
)
98 /* va_arg_bb can be executed more times than va_start_bb. */
/* Dominance guarantees every backward path hits va_start_bb before
   the entry block.  */
105 gcc_assert (src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
));
/* Expand each block only once.  */
107 if (! bitmap_bit_p (visited
, src
->index
))
109 bitmap_set_bit (visited
, src
->index
);
/* Keep walking through this block's own predecessors.  */
110 FOR_EACH_EDGE (e
, ei
, src
->preds
)
116 sbitmap_free (visited
);
121 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
122 return constant, otherwise return HOST_WIDE_INT_M1U.
123 GPR_P is true if this is GPR counter. */
/* NOTE(review): elided extract -- interior lines (braces, loop heads,
   returns) are missing from this view.  Comments only; no code changes.  */
125 static unsigned HOST_WIDE_INT
126 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
/* RET accumulates the constant bump; VAL/COUNTER_VAL track the running
   counter value in the second (recording) walk below.  */
131 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
132 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offset cache on first use.  */
134 if (si
->offsets
== NULL
)
138 si
->offsets
= XNEWVEC (int, num_ssa_names
);
/* Presumably initializes every cached offset to "unknown" (-1);
   the loop body is elided -- TODO confirm.  */
139 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Select the GPR or FPR counter state depending on GPR_P.  */
143 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
144 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
145 orig_lhs
= lhs
= rhs
;
/* First walk: follow the SSA def chain from RHS back to COUNTER,
   summing constant increments into RET.  */
148 enum tree_code rhs_code
;
/* A cached offset short-circuits the walk for already-seen names.  */
151 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
153 if (counter_val
>= max_size
)
159 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
163 stmt
= SSA_NAME_DEF_STMT (lhs
);
/* Only plain assignments defining LHS are followed.  */
165 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
166 return HOST_WIDE_INT_M1U
;
168 rhs_code
= gimple_assign_rhs_code (stmt
);
169 rhs1
= gimple_assign_rhs1 (stmt
);
/* Copies and casts of another SSA name pass through unchanged.  */
170 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
171 || gimple_assign_cast_p (stmt
))
172 && TREE_CODE (rhs1
) == SSA_NAME
)
/* lhs = name + CST (pointer or integer) adds CST to the bump.  */
178 if ((rhs_code
== POINTER_PLUS_EXPR
179 || rhs_code
== PLUS_EXPR
)
180 && TREE_CODE (rhs1
) == SSA_NAME
181 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
183 ret
+= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
/* lhs = &MEM_REF[name + CST] likewise contributes CST.  */
188 if (rhs_code
== ADDR_EXPR
189 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
190 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
191 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
193 ret
+= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
194 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
/* Anything other than a single-rhs copy at the chain's root is opaque.  */
198 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
199 return HOST_WIDE_INT_M1U
;
/* The chain must terminate at COUNTER itself (same tree, or the same
   COMPONENT_REF field on the same base).  */
201 rhs
= gimple_assign_rhs1 (stmt
);
202 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
203 return HOST_WIDE_INT_M1U
;
205 if (TREE_CODE (counter
) == COMPONENT_REF
)
207 if (get_base_address (counter
) != get_base_address (rhs
)
208 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
209 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
210 return HOST_WIDE_INT_M1U
;
212 else if (counter
!= rhs
)
213 return HOST_WIDE_INT_M1U
;
/* Second walk: re-traverse the chain recording each intermediate
   SSA name's absolute counter value in si->offsets.  */
219 val
= ret
+ counter_val
;
222 enum tree_code rhs_code
;
225 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
/* Values at or beyond the cap are clamped to max_size in the cache.  */
229 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
231 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
233 stmt
= SSA_NAME_DEF_STMT (lhs
);
235 rhs_code
= gimple_assign_rhs_code (stmt
);
236 rhs1
= gimple_assign_rhs1 (stmt
);
237 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
238 || gimple_assign_cast_p (stmt
))
239 && TREE_CODE (rhs1
) == SSA_NAME
)
/* Mirror of the first walk: subtract each constant as we step back
   toward COUNTER, so VAL is the value *before* each bump.  */
245 if ((rhs_code
== POINTER_PLUS_EXPR
246 || rhs_code
== PLUS_EXPR
)
247 && TREE_CODE (rhs1
) == SSA_NAME
248 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
250 val
-= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
255 if (rhs_code
== ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
257 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
258 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
260 val
-= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
261 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
272 /* Called by walk_tree to look for references to va_list variables. */
/* NOTE(review): elided extract -- the return statements and some argument
   lines are missing from this view.  Comments only; no code changes.
   Walk-tree callbacks conventionally return non-NULL to stop the walk,
   i.e. when *TP references a tracked va_list variable.  */
275 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* The tracked-variable bitmap is smuggled in via walk_stmt_info::info.  */
278 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
/* SSA names are keyed by version number ...  */
281 if (TREE_CODE (var
) == SSA_NAME
)
283 if (bitmap_bit_p (va_list_vars
, SSA_NAME_VERSION (var
)))
/* ... while VAR_DECLs are keyed by DECL_UID offset past the SSA-name
   range, so the two key spaces share one bitmap without colliding.  */
286 else if (TREE_CODE (var
) == VAR_DECL
)
288 if (bitmap_bit_p (va_list_vars
, DECL_UID (var
) + num_ssa_names
))
296 /* Helper function of va_list_counter_struct_op. Compute
297 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
298 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
299 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): elided extract -- braces and some statements are missing
   from this view.  Comments only; no code changes.  */
303 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
306 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not decided yet for this bb": decide once,
   lazily, whether precise size tracking is sound here.  */
308 if (si
->compute_sizes
< 0)
310 si
->compute_sizes
= 0;
/* Precise tracking only works with a single va_start whose block
   executes at least as often as this one.  */
311 if (si
->va_start_count
== 1
312 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
313 si
->compute_sizes
= 1;
315 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
317 "bb%d will %sbe executed at most once for each va_start "
318 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
319 si
->va_start_bb
->index
);
/* "+ 1 > 1" rejects the HOST_WIDE_INT_M1U failure value via unsigned
   wraparound while also rejecting a zero increment.  */
324 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
/* Bump the GPR estimate, saturating at the target maximum.  */
326 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
328 cfun
->va_list_gpr_size
+= increment
;
/* Same for the FPR estimate.  */
332 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
334 cfun
->va_list_fpr_size
+= increment
;
/* A write to the counter, or a read we cannot size precisely, forces
   the conservative answer: save everything.  The gpr/fpr selection
   logic around these assignments is elided here.  */
339 if (write_p
|| !si
->compute_sizes
)
342 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
344 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
349 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
350 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
351 is false, AP has been seen in VAR = AP assignment.
352 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
353 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): elided extract -- the early "return false/true" lines are
   missing from this view.  Comments only; no code changes.  */
357 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a field access (ap.gp_offset / ap.fp_offset style).  */
362 if (TREE_CODE (ap
) != COMPONENT_REF
363 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* VAR must be a local SSA temporary that is not itself a tracked
   va_list variable.  */
366 if (TREE_CODE (var
) != SSA_NAME
367 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (var
)))
/* The base of the field access must be one of the tracked va_list
   VAR_DECLs (keyed by DECL_UID + num_ssa_names in the bitmap).  */
370 base
= get_base_address (ap
);
371 if (TREE_CODE (base
) != VAR_DECL
372 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
) + num_ssa_names
))
/* Dispatch on which counter field is touched: GPR or FPR.  */
375 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
376 va_list_counter_op (si
, ap
, var
, true, write_p
);
377 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
378 va_list_counter_op (si
, ap
, var
, false, write_p
);
384 /* Check for TEM = AP. Return true if found and the caller shouldn't
385 search for va_list references in the statement. */
/* NOTE(review): elided extract -- braces and return statements are missing
   from this view.  Comments only; no code changes.  Used for the simple
   (void* / char*) va_list representation, where AP itself is the counter.  */
388 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be one of the tracked va_list VAR_DECLs.  */
390 if (TREE_CODE (ap
) != VAR_DECL
391 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* TEM must be an ordinary SSA temporary, not a tracked va_list name.  */
394 if (TREE_CODE (tem
) != SSA_NAME
395 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem
)))
/* Lazily decide (once per bb) whether precise size tracking is sound;
   same pattern as va_list_counter_op.  */
398 if (si
->compute_sizes
< 0)
400 si
->compute_sizes
= 0;
401 if (si
->va_start_count
== 1
402 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
403 si
->compute_sizes
= 1;
405 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
407 "bb%d will %sbe executed at most once for each va_start "
408 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
409 si
->va_start_bb
->index
);
412 /* For void * or char * va_list types, there is just one counter.
413 If va_arg is used in a loop, we don't know how many registers need
415 if (! si
->compute_sizes
)
/* An unanalyzable pointer bump means we cannot track this read.  */
418 if (va_list_counter_bump (si
, ap
, tem
, true) == HOST_WIDE_INT_M1U
)
421 /* Note the temporary, as we need to track whether it doesn't escape
422 the current function. */
423 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (tem
));
433 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* NOTE(review): elided extract -- the first comment lines and the return
   statements are missing from this view.  Comments only; no code changes.
   Recognizes AP = TEM2 stores for the simple-pointer va_list form.  */
436 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
438 unsigned HOST_WIDE_INT increment
;
/* AP must be a tracked va_list VAR_DECL ...  */
440 if (TREE_CODE (ap
) != VAR_DECL
441 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* ... and TEM2 an ordinary SSA temporary, not a tracked name.  */
444 if (TREE_CODE (tem2
) != SSA_NAME
445 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem2
)))
/* Without precise tracking enabled there is nothing to refine here.  */
448 if (si
->compute_sizes
<= 0)
/* Compute the constant bump TEM2 = AP + CST represents; "+ 1 <= 1"
   filters both failure (HOST_WIDE_INT_M1U wraps to 0) and zero.  */
451 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
452 if (increment
+ 1 <= 1)
/* Grow the GPR estimate, saturating at the target maximum.  */
455 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
456 cfun
->va_list_gpr_size
+= increment
;
458 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
464 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
465 containing value of some va_list variable plus optionally some constant,
466 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
467 depending whether LHS is a function local temporary. */
/* NOTE(review): elided extract -- braces and early returns are missing
   from this view.  Comments only; no code changes.  */
470 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-valued RHS can carry a va_list value here.  */
472 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* Case 1: RHS is directly a tracked escape temporary.  */
475 if (TREE_CODE (rhs
) == SSA_NAME
)
477 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (rhs
)))
/* Case 2: RHS is &MEM_REF[ptr ...] where PTR is a tracked temporary.  */
480 else if (TREE_CODE (rhs
) == ADDR_EXPR
481 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
482 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)) == SSA_NAME
)
484 tree ptr
= TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0);
485 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (ptr
)))
/* A non-SSA destination means the va_list value leaves local
   temporaries: treat it as a full escape.  */
491 if (TREE_CODE (lhs
) != SSA_NAME
)
493 si
->va_list_escapes
= true;
/* Lazily decide (once per bb) whether precise size tracking is sound;
   same pattern as va_list_counter_op.  */
497 if (si
->compute_sizes
< 0)
499 si
->compute_sizes
= 0;
500 if (si
->va_start_count
== 1
501 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
502 si
->compute_sizes
= 1;
504 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
506 "bb%d will %sbe executed at most once for each va_start "
507 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
508 si
->va_start_bb
->index
);
511 /* For void * or char * va_list types, there is just one counter.
512 If va_arg is used in a loop, we don't know how many registers need
514 if (! si
->compute_sizes
)
516 si
->va_list_escapes
= true;
/* If the bump from the original va_start pointer cannot be computed,
   give up and mark the va_list as escaping.  */
520 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
521 == HOST_WIDE_INT_M1U
)
523 si
->va_list_escapes
= true;
/* Otherwise LHS is another local alias of the va_list pointer: track it.  */
527 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (lhs
));
531 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
532 Return true if va_list might be escaping. */
/* NOTE(review): elided extract -- braces, returns and some statements are
   missing from this view.  Comments only; no code changes.  */
535 check_all_va_list_escapes (struct stdarg_info
*si
)
/* Scan every statement of the function for uses of the tracked
   escape temporaries.  */
539 FOR_EACH_BB_FN (bb
, cfun
)
541 gimple_stmt_iterator i
;
/* PHI nodes first: a tracked value flowing into an untracked PHI
   result counts as an escape.  */
543 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
548 gimple phi
= gsi_stmt (i
);
550 lhs
= PHI_RESULT (phi
);
/* Virtual PHIs move no data; PHIs whose result is already tracked
   are fine.  */
551 if (virtual_operand_p (lhs
)
552 || bitmap_bit_p (si
->va_list_escape_vars
,
553 SSA_NAME_VERSION (lhs
)))
556 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
558 tree rhs
= USE_FROM_PTR (uop
);
559 if (TREE_CODE (rhs
) == SSA_NAME
560 && bitmap_bit_p (si
->va_list_escape_vars
,
561 SSA_NAME_VERSION (rhs
)))
563 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
565 fputs ("va_list escapes in ", dump_file
);
566 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
567 fputc ('\n', dump_file
);
/* Then ordinary statements.  */
574 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
576 gimple stmt
= gsi_stmt (i
);
/* Debug statements never cause a real escape.  */
580 if (is_gimple_debug (stmt
))
583 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
/* Only uses of tracked escape temporaries are interesting.  */
585 if (! bitmap_bit_p (si
->va_list_escape_vars
,
586 SSA_NAME_VERSION (use
)))
589 if (is_gimple_assign (stmt
))
591 tree rhs
= gimple_assign_rhs1 (stmt
);
592 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* x = *use: a load through the va_list pointer -- account the
   bytes read toward va_list_gpr_size.  */
595 if (rhs_code
== MEM_REF
596 && TREE_OPERAND (rhs
, 0) == use
597 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
598 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)))
599 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
601 unsigned HOST_WIDE_INT gpr_size
;
602 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
/* Cached pointer offset + MEM_REF displacement + access width
   bounds how far into the save area this load reaches.  */
604 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
605 + tree_to_shwi (TREE_OPERAND (rhs
, 1))
606 + tree_to_uhwi (access_size
);
607 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
608 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
609 else if (gpr_size
> cfun
->va_list_gpr_size
)
610 cfun
->va_list_gpr_size
= gpr_size
;
614 /* va_arg sequences may contain
615 other_ap_temp = ap_temp;
616 other_ap_temp = ap_temp + constant;
617 other_ap_temp = (some_type *) ap_temp;
/* Copies/casts/constant bumps of a tracked temporary are benign as
   long as the destination is itself tracked (or a tracked decl).  */
621 && ((rhs_code
== POINTER_PLUS_EXPR
622 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
624 || gimple_assign_cast_p (stmt
)
625 || (get_gimple_rhs_class (rhs_code
)
626 == GIMPLE_SINGLE_RHS
)))
628 tree lhs
= gimple_assign_lhs (stmt
);
630 if (TREE_CODE (lhs
) == SSA_NAME
631 && bitmap_bit_p (si
->va_list_escape_vars
,
632 SSA_NAME_VERSION (lhs
)))
635 if (TREE_CODE (lhs
) == VAR_DECL
636 && bitmap_bit_p (si
->va_list_vars
,
637 DECL_UID (lhs
) + num_ssa_names
))
/* x = &*use: taking the address back is fine if the result is
   tracked too.  */
640 else if (rhs_code
== ADDR_EXPR
641 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
642 && TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0) == use
)
644 tree lhs
= gimple_assign_lhs (stmt
);
646 if (bitmap_bit_p (si
->va_list_escape_vars
,
647 SSA_NAME_VERSION (lhs
)))
/* Any other use of a tracked temporary is a potential escape.  */
652 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
654 fputs ("va_list escapes in ", dump_file
);
655 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
656 fputc ('\n', dump_file
);
/* Static description of the stdarg pass for the pass manager.  The name
   and tv_id fields are elided from this view.  */
669 const pass_data pass_data_stdarg
=
671 GIMPLE_PASS
, /* type */
673 OPTGROUP_NONE
, /* optinfo_flags */
674 true, /* has_execute */
/* Requires a CFG and SSA form, the prerequisites for the dominance and
   def-chain analyses this pass performs.  */
676 ( PROP_cfg
| PROP_ssa
), /* properties_required */
677 0, /* properties_provided */
678 0, /* properties_destroyed */
679 0, /* todo_flags_start */
680 0, /* todo_flags_finish */
/* Pass-manager wrapper for the stdarg optimization; the actual work is in
   pass_stdarg::execute below.  */
683 class pass_stdarg
: public gimple_opt_pass
686 pass_stdarg (gcc::context
*ctxt
)
687 : gimple_opt_pass (pass_data_stdarg
, ctxt
)
690 /* opt_pass methods: */
/* Gate: run only on functions that actually take variable arguments.  */
691 virtual bool gate (function
*fun
)
693 /* This optimization is only for stdarg functions. */
694 return fun
->stdarg
!= 0;
697 virtual unsigned int execute (function
*);
699 }; // class pass_stdarg
/* Main driver: find all va_start uses, track how the va_list value flows,
   and compute fun->va_list_{gpr,fpr}_size -- the number of register-save
   bytes va_start really needs to spill.  NOTE(review): elided extract --
   braces, labels, returns and some statements are missing from this view.
   Comments only; no code changes.  */
702 pass_stdarg::execute (function
*fun
)
705 bool va_list_escapes
= false;
706 bool va_list_simple_ptr
;
707 struct stdarg_info si
;
708 struct walk_stmt_info wi
;
709 const char *funcname
= NULL
;
/* Start from the optimistic assumption: nothing needs saving.  */
712 fun
->va_list_gpr_size
= 0;
713 fun
->va_list_fpr_size
= 0;
714 memset (&si
, 0, sizeof (si
));
715 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
716 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
719 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* "Simple pointer" va_list (void*/char*) has a single implicit counter;
   struct-based va_lists track GPR/FPR counters via named fields.  */
721 cfun_va_list
= targetm
.fn_abi_va_list (fun
->decl
);
722 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
723 && (TREE_TYPE (cfun_va_list
) == void_type_node
724 || TREE_TYPE (cfun_va_list
) == char_type_node
);
725 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
/* Phase 1: locate every va_start (and disqualifying builtin) call and
   record the va_list decls they initialize.  */
727 FOR_EACH_BB_FN (bb
, fun
)
729 gimple_stmt_iterator i
;
731 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
733 gimple stmt
= gsi_stmt (i
);
736 if (!is_gimple_call (stmt
))
739 callee
= gimple_call_fndecl (stmt
);
741 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
744 switch (DECL_FUNCTION_CODE (callee
))
746 case BUILT_IN_VA_START
:
748 /* If old style builtins are used, don't optimize anything. */
749 case BUILT_IN_SAVEREGS
:
750 case BUILT_IN_NEXT_ARG
:
751 va_list_escapes
= true;
/* Peel &ap (and an optional [0] for array-typed va_list) down to the
   underlying VAR_DECL; anything else defeats the analysis.  */
758 ap
= gimple_call_arg (stmt
, 0);
760 if (TREE_CODE (ap
) != ADDR_EXPR
)
762 va_list_escapes
= true;
765 ap
= TREE_OPERAND (ap
, 0);
766 if (TREE_CODE (ap
) == ARRAY_REF
)
768 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
770 va_list_escapes
= true;
773 ap
= TREE_OPERAND (ap
, 0);
/* The decl must have the ABI va_list type and be a function-local
   variable, otherwise its contents are not ours to reason about.  */
775 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
776 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (fun
->decl
))
777 || TREE_CODE (ap
) != VAR_DECL
)
779 va_list_escapes
= true;
783 if (is_global_var (ap
))
785 va_list_escapes
= true;
/* Track this decl: DECL_UID keys are offset past the SSA-name range
   so decls and SSA names share one bitmap.  */
789 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
) + num_ssa_names
);
791 /* VA_START_BB and VA_START_AP will be only used if there is just
792 one va_start in the function. */
801 /* If there were no va_start uses in the function, there is no need to
803 if (si
.va_start_count
== 0)
806 /* If some va_list arguments weren't local, we can't optimize. */
810 /* For void * or char * va_list, something useful can be done only
811 if there is just one va_start. */
812 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
814 va_list_escapes
= true;
818 /* For struct * va_list, if the backend didn't tell us what the counter fields
819 are, there is nothing more we can do. */
820 if (!va_list_simple_ptr
821 && va_list_gpr_counter_field
== NULL_TREE
822 && va_list_fpr_counter_field
== NULL_TREE
)
824 va_list_escapes
= true;
828 /* For void * or char * va_list there is just one counter
829 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
830 if (va_list_simple_ptr
)
831 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Phase 2: walk every statement, classifying each recognized va_list
   operation and flagging everything else as an escape.  Dominators are
   needed by reachable_at_most_once.  */
833 calculate_dominance_info (CDI_DOMINATORS
);
834 memset (&wi
, 0, sizeof (wi
));
835 wi
.info
= si
.va_list_vars
;
837 FOR_EACH_BB_FN (bb
, fun
)
839 gimple_stmt_iterator i
;
/* -1 = "not yet decided for this bb"; see va_list_counter_op.  */
841 si
.compute_sizes
= -1;
844 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
845 them as assignments for the purpose of escape analysis. This is
846 not needed for non-simple va_list because virtual phis don't perform
847 any real data movement. */
848 if (va_list_simple_ptr
)
854 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
856 gimple phi
= gsi_stmt (i
);
857 lhs
= PHI_RESULT (phi
);
859 if (virtual_operand_p (lhs
))
862 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
864 rhs
= USE_FROM_PTR (uop
);
/* Each PHI argument is treated like lhs = rhs.  */
865 if (va_list_ptr_read (&si
, rhs
, lhs
))
867 else if (va_list_ptr_write (&si
, lhs
, rhs
))
870 check_va_list_escapes (&si
, lhs
, rhs
);
872 if (si
.va_list_escapes
)
874 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
876 fputs ("va_list escapes in ", dump_file
);
877 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
878 fputc ('\n', dump_file
);
880 va_list_escapes
= true;
/* Statement scan for this bb; stops early once an escape is known.  */
886 for (i
= gsi_start_bb (bb
);
887 !gsi_end_p (i
) && !va_list_escapes
;
890 gimple stmt
= gsi_stmt (i
);
892 /* Don't look at __builtin_va_{start,end}, they are ok. */
893 if (is_gimple_call (stmt
))
895 tree callee
= gimple_call_fndecl (stmt
);
898 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
899 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
900 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
904 if (is_gimple_assign (stmt
))
906 tree lhs
= gimple_assign_lhs (stmt
);
907 tree rhs
= gimple_assign_rhs1 (stmt
);
909 if (va_list_simple_ptr
)
911 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
912 == GIMPLE_SINGLE_RHS
)
914 /* Check for ap ={v} {}. */
915 if (TREE_CLOBBER_P (rhs
))
918 /* Check for tem = ap. */
919 else if (va_list_ptr_read (&si
, rhs
, lhs
))
922 /* Check for the last insn in:
927 else if (va_list_ptr_write (&si
, lhs
, rhs
))
/* Remaining pointer arithmetic / casts / copies may propagate the
   va_list value; check whether they let it escape.  */
931 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
932 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
933 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
934 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
935 == GIMPLE_SINGLE_RHS
))
936 check_va_list_escapes (&si
, lhs
, rhs
);
/* Struct-based va_list: only counter-field reads/writes are tracked.  */
940 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
941 == GIMPLE_SINGLE_RHS
)
943 /* Check for ap ={v} {}. */
944 if (TREE_CLOBBER_P (rhs
))
947 /* Check for ap[0].field = temp. */
948 else if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
951 /* Check for temp = ap[0].field. */
952 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
957 /* Do any architecture specific checking. */
958 if (targetm
.stdarg_optimize_hook
959 && targetm
.stdarg_optimize_hook (&si
, stmt
))
963 else if (is_gimple_debug (stmt
))
966 /* All other uses of va_list are either va_copy (that is not handled
967 in this optimization), taking address of va_list variable or
968 passing va_list to other functions (in that case va_list might
969 escape the function and therefore va_start needs to set it up
970 fully), or some unexpected use of va_list. None of these should
971 happen in a gimplified VA_ARG_EXPR. */
972 if (si
.va_list_escapes
973 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
975 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
977 fputs ("va_list escapes in ", dump_file
);
978 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
979 fputc ('\n', dump_file
);
981 va_list_escapes
= true;
/* Phase 3: for the simple-pointer form, verify none of the recorded
   alias temporaries escapes through other uses.  */
989 if (! va_list_escapes
990 && va_list_simple_ptr
991 && ! bitmap_empty_p (si
.va_list_escape_vars
)
992 && check_all_va_list_escapes (&si
))
993 va_list_escapes
= true;
/* On any escape, fall back to saving everything.  */
998 fun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
999 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
1001 BITMAP_FREE (si
.va_list_vars
);
1002 BITMAP_FREE (si
.va_list_escape_vars
);
/* Dump-file summary of the final decision.  */
1006 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
1007 funcname
, (int) va_list_escapes
);
1008 if (fun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
1009 fputs ("all", dump_file
);
1011 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
1012 fputs (" GPR units and ", dump_file
);
1013 if (fun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
1014 fputs ("all", dump_file
);
1016 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
1017 fputs (" FPR units.\n", dump_file
);
1025 make_pass_stdarg (gcc::context
*ctxt
)
1027 return new pass_stdarg (ctxt
);