1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "hard-reg-set.h"
33 #include "langhooks.h"
34 #include "gimple-pretty-print.h"
38 #include "dominance.h"
40 #include "basic-block.h"
41 #include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-expr.h"
46 #include "gimple-iterator.h"
47 #include "gimple-walk.h"
48 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
54 #include "tree-pass.h"
55 #include "tree-stdarg.h"
57 /* A simple pass that attempts to optimize stdarg functions on architectures
58 that need to save register arguments to stack on entry to stdarg functions.
59 If the function doesn't use any va_start macros, no registers need to
60 be saved. If va_start macros are used and the va_list variables don't escape
61 the function, it is only necessary to save registers that will be used
62 in va_arg macros. E.g. if va_arg is only used with integral types
63 in the function, floating point registers don't need to be saved, etc. */
66 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
67 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this extract is missing interior lines (the embedded
   original line numbers jump, e.g. 70->72->78), so local declarations,
   braces, and the return statements of this function are not visible
   here.  Comments below describe only what the visible code shows.  */
70 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
/* Worklist of predecessor edges still to be examined.  */
72 vec
<edge
> stack
= vNULL
;
/* Same block: trivially executed equally often.  */
78 if (va_arg_bb
== va_start_bb
)
/* Dominance of VA_ARG_BB by VA_START_BB is a necessary condition.  */
81 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
/* Bitmap of basic-block indices already visited during the backward walk.  */
84 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
85 bitmap_clear (visited
);
/* Seed the worklist with all predecessor edges of VA_ARG_BB.  */
88 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
/* Backward walk over predecessors until the worklist is drained.  */
91 while (! stack
.is_empty ())
/* EDGE_COMPLEX edges (abnormal control flow) make the execution-count
   relation unknown; presumably the original bails out here — the
   then-branch is not visible in this extract.  */
98 if (e
->flags
& EDGE_COMPLEX
)
104 if (src
== va_start_bb
)
107 /* va_arg_bb can be executed more times than va_start_bb. */
108 if (src
== va_arg_bb
)
/* The walk must never reach the function entry block without first
   hitting VA_START_BB (which dominates VA_ARG_BB).  */
114 gcc_assert (src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
));
/* Enqueue each source block's predecessors exactly once.  */
116 if (! bitmap_bit_p (visited
, src
->index
))
118 bitmap_set_bit (visited
, src
->index
);
119 FOR_EACH_EDGE (e
, ei
, src
->preds
)
125 sbitmap_free (visited
);
130 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
131 return constant, otherwise return HOST_WIDE_INT_M1U.
132 GPR_P is true if this is GPR counter. */
/* NOTE(review): interior lines are missing from this extract (embedded
   numbering jumps, e.g. 135->140, 154->157), so declarations such as
   lhs/orig_lhs/stmt/rhs1 and loop/brace structure are not fully visible.
   The visible code is a two-phase walk: phase 1 follows SSA defs
   backwards from RHS accumulating RET; phase 2 re-walks the same chain
   recording per-SSA-name offsets in si->offsets.  */
134 static unsigned HOST_WIDE_INT
135 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
140 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
141 unsigned int max_size
;
/* Lazily allocate the per-SSA-name offsets cache on first use.  */
143 if (si
->offsets
== NULL
)
147 si
->offsets
= XNEWVEC (int, num_ssa_names
);
148 for (i
= 0; i
< num_ssa_names
; ++i
)
/* Pick the counter baseline and cap for the requested register class.  */
152 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
153 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
154 orig_lhs
= lhs
= rhs
;
/* Phase 1: walk the SSA def chain backwards from RHS toward COUNTER,
   accumulating the constant bump in RET.  */
157 enum tree_code rhs_code
;
/* A cached offset means this name was already resolved on a previous
   call; reuse it instead of re-walking.  */
160 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
162 if (counter_val
>= max_size
)
168 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
172 stmt
= SSA_NAME_DEF_STMT (lhs
);
/* Only plain assignments defining LHS can be followed.  */
174 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
175 return HOST_WIDE_INT_M1U
;
177 rhs_code
= gimple_assign_rhs_code (stmt
);
178 rhs1
= gimple_assign_rhs1 (stmt
);
/* Plain copies and casts of an SSA name are followed transparently.  */
179 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
180 || gimple_assign_cast_p (stmt
))
181 && TREE_CODE (rhs1
) == SSA_NAME
)
/* lhs = name + CST accumulates CST into the bump.  */
187 if ((rhs_code
== POINTER_PLUS_EXPR
188 || rhs_code
== PLUS_EXPR
)
189 && TREE_CODE (rhs1
) == SSA_NAME
190 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
192 ret
+= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
/* lhs = &MEM_REF[name + CST] also accumulates CST and continues
   from the base SSA name.  */
197 if (rhs_code
== ADDR_EXPR
198 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
199 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
200 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
202 ret
+= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
203 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
/* Anything else than a single-RHS assignment ends the walk as failure.  */
207 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
208 return HOST_WIDE_INT_M1U
;
210 rhs
= gimple_assign_rhs1 (stmt
);
/* The chain must terminate at something shaped like COUNTER itself.  */
211 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
212 return HOST_WIDE_INT_M1U
;
/* For COMPONENT_REF counters, require the same base object and the
   same FIELD_DECL; otherwise require pointer equality.  */
214 if (TREE_CODE (counter
) == COMPONENT_REF
)
216 if (get_base_address (counter
) != get_base_address (rhs
)
217 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
218 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
219 return HOST_WIDE_INT_M1U
;
221 else if (counter
!= rhs
)
222 return HOST_WIDE_INT_M1U
;
/* Phase 2: re-walk the same chain, recording absolute counter values
   (baseline + bump) into si->offsets, saturating at MAX_SIZE.  */
228 val
= ret
+ counter_val
;
231 enum tree_code rhs_code
;
234 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
238 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
240 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
242 stmt
= SSA_NAME_DEF_STMT (lhs
);
244 rhs_code
= gimple_assign_rhs_code (stmt
);
245 rhs1
= gimple_assign_rhs1 (stmt
);
/* Mirror of the phase-1 cases, subtracting the constants so VAL tracks
   the counter value at each intermediate SSA name.  */
246 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
247 || gimple_assign_cast_p (stmt
))
248 && TREE_CODE (rhs1
) == SSA_NAME
)
254 if ((rhs_code
== POINTER_PLUS_EXPR
255 || rhs_code
== PLUS_EXPR
)
256 && TREE_CODE (rhs1
) == SSA_NAME
257 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
259 val
-= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
264 if (rhs_code
== ADDR_EXPR
265 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
266 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
267 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
269 val
-= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
270 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
281 /* Called by walk_tree to look for references to va_list variables. */
/* NOTE(review): the return statements and the binding of VAR to *TP are
   not visible in this truncated extract; the visible logic tests whether
   the walked tree node is one of the tracked va_list SSA names or
   VAR_DECLs.  SSA names are keyed by version; VAR_DECLs by
   DECL_UID + num_ssa_names, matching how the bitmap is populated in
   pass_stdarg::execute.  */
284 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
/* The tracked-variables bitmap is smuggled in via walk_stmt_info::info.  */
287 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
290 if (TREE_CODE (var
) == SSA_NAME
)
292 if (bitmap_bit_p (va_list_vars
, SSA_NAME_VERSION (var
)))
295 else if (TREE_CODE (var
) == VAR_DECL
)
297 if (bitmap_bit_p (va_list_vars
, DECL_UID (var
) + num_ssa_names
))
305 /* Helper function of va_list_counter_struct_op. Compute
306 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
307 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
308 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): interior lines missing (numbering jumps 308->312,
   328->333), including the dump fprintf call and some condition text.  */
312 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
315 unsigned HOST_WIDE_INT increment
;
/* compute_sizes < 0 means "not yet decided for this bb": decide it now.
   It is 1 only when there is a single va_start and this bb executes at
   most once per va_start.  */
317 if (si
->compute_sizes
< 0)
319 si
->compute_sizes
= 0;
320 if (si
->va_start_count
== 1
321 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
322 si
->compute_sizes
= 1;
324 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
326 "bb%d will %sbe executed at most once for each va_start "
327 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
328 si
->va_start_bb
->index
);
/* "+ 1 > 1" filters out both 0 and HOST_WIDE_INT_M1U (the failure
   value), which wraps to 0.  */
333 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
/* Bump the appropriate per-function size, saturating at the cap
   (the else-branches that saturate are not visible here).  */
335 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
337 cfun
->va_list_gpr_size
+= increment
;
341 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
343 cfun
->va_list_fpr_size
+= increment
;
/* On a write, or when sizes cannot be computed, give up and assume
   the maximum save area is needed.  */
348 if (write_p
|| !si
->compute_sizes
)
351 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
353 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
358 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
359 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
360 is false, AP has been seen in VAR = AP assignment.
361 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
362 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): return statements are not visible in this truncated
   extract; each early-exit condition below presumably returns false.  */
366 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
/* AP must be a field access (ap.gp_offset / ap.fp_offset style).  */
371 if (TREE_CODE (ap
) != COMPONENT_REF
372 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
/* VAR must be an SSA temporary that is not itself a tracked va_list.  */
375 if (TREE_CODE (var
) != SSA_NAME
376 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (var
)))
/* The base of AP must be one of the tracked va_list VAR_DECLs.  */
379 base
= get_base_address (ap
);
380 if (TREE_CODE (base
) != VAR_DECL
381 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
) + num_ssa_names
))
/* Dispatch on which counter field is accessed; other fields are
   handled by code not visible in this extract.  */
384 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
385 va_list_counter_op (si
, ap
, var
, true, write_p
);
386 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
387 va_list_counter_op (si
, ap
, var
, false, write_p
);
393 /* Check for TEM = AP. Return true if found and the caller shouldn't
394 search for va_list references in the statement. */
/* NOTE(review): return statements and the dump fprintf call are missing
   from this truncated extract.  */
397 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
/* AP must be a tracked va_list VAR_DECL.  */
399 if (TREE_CODE (ap
) != VAR_DECL
400 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* TEM must be a non-va_list SSA temporary.  */
403 if (TREE_CODE (tem
) != SSA_NAME
404 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem
)))
/* Lazily decide whether counter sizes can be computed for this bb
   (same pattern as in va_list_counter_op).  */
407 if (si
->compute_sizes
< 0)
409 si
->compute_sizes
= 0;
410 if (si
->va_start_count
== 1
411 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
412 si
->compute_sizes
= 1;
414 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
416 "bb%d will %sbe executed at most once for each va_start "
417 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
418 si
->va_start_bb
->index
);
421 /* For void * or char * va_list types, there is just one counter.
422 If va_arg is used in a loop, we don't know how many registers need
424 if (! si
->compute_sizes
)
/* Record the pointer value read so the chain can be followed later;
   failure to recognize the bump aborts the recognition.  */
427 if (va_list_counter_bump (si
, ap
, tem
, true) == HOST_WIDE_INT_M1U
)
430 /* Note the temporary, as we need to track whether it doesn't escape
431 the current function. */
432 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (tem
));
442 sequence and update cfun->va_list_gpr_size. Return true if found. */
/* NOTE(review): the opening lines of this function's comment and its
   return statements are missing from this truncated extract.  The
   visible logic recognizes AP = TEM2 where TEM2 is a bumped copy of AP
   (the write-back at the end of a va_arg sequence for simple-pointer
   va_list) and credits the bump to cfun->va_list_gpr_size.  */
445 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
447 unsigned HOST_WIDE_INT increment
;
/* AP must be a tracked va_list VAR_DECL.  */
449 if (TREE_CODE (ap
) != VAR_DECL
450 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
/* TEM2 must be a non-va_list SSA temporary.  */
453 if (TREE_CODE (tem2
) != SSA_NAME
454 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem2
)))
457 if (si
->compute_sizes
<= 0)
460 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
/* "+ 1 <= 1" catches both 0 and the HOST_WIDE_INT_M1U failure value
   (which wraps to 0).  */
461 if (increment
+ 1 <= 1)
/* Accumulate the increment, saturating at the maximum save area.  */
464 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
465 cfun
->va_list_gpr_size
+= increment
;
467 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
473 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
474 containing value of some va_list variable plus optionally some constant,
475 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
476 depending whether LHS is a function local temporary. */
/* NOTE(review): interior lines missing (numbering jumps 481->484,
   494->500, 517->520), including early returns and the dump call.  */
479 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
/* Only pointer-typed values can carry a va_list pointer.  */
481 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
/* RHS is interesting only if it is (derived from) a tracked escape
   temporary, either directly or as &MEM_REF[ptr].  */
484 if (TREE_CODE (rhs
) == SSA_NAME
)
486 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (rhs
)))
489 else if (TREE_CODE (rhs
) == ADDR_EXPR
490 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)) == SSA_NAME
)
493 tree ptr
= TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0);
494 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (ptr
)))
/* Storing a tracked value into anything but an SSA temporary means
   the va_list value leaves our tracking: mark escape.  */
500 if (TREE_CODE (lhs
) != SSA_NAME
)
502 si
->va_list_escapes
= true;
/* Lazily decide whether counter sizes can be computed for this bb
   (same pattern as in va_list_counter_op).  */
506 if (si
->compute_sizes
< 0)
508 si
->compute_sizes
= 0;
509 if (si
->va_start_count
== 1
510 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
511 si
->compute_sizes
= 1;
513 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
515 "bb%d will %sbe executed at most once for each va_start "
516 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
517 si
->va_start_bb
->index
);
520 /* For void * or char * va_list types, there is just one counter.
521 If va_arg is used in a loop, we don't know how many registers need
523 if (! si
->compute_sizes
)
525 si
->va_list_escapes
= true;
/* If the new value cannot be expressed as va_start_ap + constant,
   be conservative and treat it as an escape.  */
529 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
530 == HOST_WIDE_INT_M1U
)
532 si
->va_list_escapes
= true;
/* Otherwise keep tracking LHS as another escape-candidate temporary.  */
536 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (lhs
));
540 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
541 Return true if va_list might be escaping. */
/* NOTE(review): interior lines missing throughout (numbering jumps
   552->557, 576->583, 619->623, 626->630), including `return true;`
   statements after the dump output, `continue`s, and the final
   `return false;`.  Comments describe only the visible checks.  */
544 check_all_va_list_escapes (struct stdarg_info
*si
)
548 FOR_EACH_BB_FN (bb
, cfun
)
550 gimple_stmt_iterator i
;
/* First scan PHI nodes: a tracked temporary flowing into a PHI whose
   result is NOT itself tracked is a potential escape.  */
552 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
557 gimple phi
= gsi_stmt (i
);
559 lhs
= PHI_RESULT (phi
);
560 if (virtual_operand_p (lhs
)
561 || bitmap_bit_p (si
->va_list_escape_vars
,
562 SSA_NAME_VERSION (lhs
)))
565 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
567 tree rhs
= USE_FROM_PTR (uop
);
568 if (TREE_CODE (rhs
) == SSA_NAME
569 && bitmap_bit_p (si
->va_list_escape_vars
,
570 SSA_NAME_VERSION (rhs
)))
572 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
574 fputs ("va_list escapes in ", dump_file
);
575 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
576 fputc ('\n', dump_file
);
/* Then scan ordinary statements for uses of tracked temporaries.  */
583 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
585 gimple stmt
= gsi_stmt (i
);
/* Debug statements never cause a real escape.  */
589 if (is_gimple_debug (stmt
))
592 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
594 if (! bitmap_bit_p (si
->va_list_escape_vars
,
595 SSA_NAME_VERSION (use
)))
598 if (is_gimple_assign (stmt
))
600 tree rhs
= gimple_assign_rhs1 (stmt
);
601 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
/* A MEM_REF load through a tracked pointer with a known offset tells
   us how far into the register save area this va_arg reaches:
   extend cfun->va_list_gpr_size accordingly (saturating).  */
604 if (rhs_code
== MEM_REF
605 && TREE_OPERAND (rhs
, 0) == use
606 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
607 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)))
608 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
610 unsigned HOST_WIDE_INT gpr_size
;
611 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
613 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
614 + tree_to_shwi (TREE_OPERAND (rhs
, 1))
615 + tree_to_uhwi (access_size
);
616 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
617 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
618 else if (gpr_size
> cfun
->va_list_gpr_size
)
619 cfun
->va_list_gpr_size
= gpr_size
;
623 /* va_arg sequences may contain
624 other_ap_temp = ap_temp;
625 other_ap_temp = ap_temp + constant;
626 other_ap_temp = (some_type *) ap_temp;
630 && ((rhs_code
== POINTER_PLUS_EXPR
631 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
633 || gimple_assign_cast_p (stmt
)
634 || (get_gimple_rhs_class (rhs_code
)
635 == GIMPLE_SINGLE_RHS
)))
637 tree lhs
= gimple_assign_lhs (stmt
);
/* Copy/cast/bump into another tracked temporary, or back into a
   tracked va_list variable, is benign.  */
639 if (TREE_CODE (lhs
) == SSA_NAME
640 && bitmap_bit_p (si
->va_list_escape_vars
,
641 SSA_NAME_VERSION (lhs
)))
644 if (TREE_CODE (lhs
) == VAR_DECL
645 && bitmap_bit_p (si
->va_list_vars
,
646 DECL_UID (lhs
) + num_ssa_names
))
/* lhs = &MEM_REF[use] into a tracked temporary is also benign.  */
649 else if (rhs_code
== ADDR_EXPR
650 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
651 && TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0) == use
)
653 tree lhs
= gimple_assign_lhs (stmt
);
655 if (bitmap_bit_p (si
->va_list_escape_vars
,
656 SSA_NAME_VERSION (lhs
)))
/* Any other use of a tracked temporary is treated as an escape.  */
661 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
663 fputs ("va_list escapes in ", dump_file
);
664 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
665 fputc ('\n', dump_file
);
/* Static pass descriptor consumed by the pass manager.  NOTE(review):
   several initializer fields (name, gate/execute flags, tv_id) fall in
   lines missing from this extract — only the fields below are visible.  */
678 const pass_data pass_data_stdarg
=
680 GIMPLE_PASS
, /* type */
682 OPTGROUP_NONE
, /* optinfo_flags */
684 ( PROP_cfg
| PROP_ssa
), /* properties_required */
685 0, /* properties_provided */
686 0, /* properties_destroyed */
687 0, /* todo_flags_start */
688 0, /* todo_flags_finish */
/* GIMPLE optimization pass wrapping the stdarg analysis.  The gate
   restricts the pass to stdarg (variadic) functions; execute () is
   defined out-of-line below.  */
691 class pass_stdarg
: public gimple_opt_pass
694 pass_stdarg (gcc::context
*ctxt
)
695 : gimple_opt_pass (pass_data_stdarg
, ctxt
)
698 /* opt_pass methods: */
699 virtual bool gate (function
*fun
)
701 /* This optimization is only for stdarg functions. */
702 return fun
->stdarg
!= 0;
705 virtual unsigned int execute (function
*);
707 }; // class pass_stdarg
/* Main driver of the stdarg pass.  Finds va_start calls, tracks the
   va_list variables they initialize, analyzes va_arg uses to compute
   how many GPR/FPR save-area bytes the function actually needs, and
   falls back to the maxima whenever the va_list may escape.
   NOTE(review): many interior lines are missing from this extract
   (numbering jumps such as 717->720, 744->747, 759->766, 800->809),
   including local declarations (bb, cfun_va_list, ap, callee), brace
   structure, `break`/`continue`/`goto finish` statements, and the
   va_start bookkeeping — comments below cover only visible code.  */
710 pass_stdarg::execute (function
*fun
)
713 bool va_list_escapes
= false;
714 bool va_list_simple_ptr
;
715 struct stdarg_info si
;
716 struct walk_stmt_info wi
;
717 const char *funcname
= NULL
;
/* Start from the optimistic assumption that nothing needs saving.  */
720 fun
->va_list_gpr_size
= 0;
721 fun
->va_list_fpr_size
= 0;
722 memset (&si
, 0, sizeof (si
));
723 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
724 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
727 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* Classify the target's va_list ABI: "simple pointer" means plain
   void* / char*, anything else is the struct-based form.  */
729 cfun_va_list
= targetm
.fn_abi_va_list (fun
->decl
);
730 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
731 && (TREE_TYPE (cfun_va_list
) == void_type_node
732 || TREE_TYPE (cfun_va_list
) == char_type_node
);
733 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
/* Pass 1 over the CFG: locate va_start calls and record the va_list
   decls they initialize; any irregular use forces full saving.  */
735 FOR_EACH_BB_FN (bb
, fun
)
737 gimple_stmt_iterator i
;
739 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
741 gimple stmt
= gsi_stmt (i
);
744 if (!is_gimple_call (stmt
))
747 callee
= gimple_call_fndecl (stmt
);
749 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
752 switch (DECL_FUNCTION_CODE (callee
))
754 case BUILT_IN_VA_START
:
756 /* If old style builtins are used, don't optimize anything. */
757 case BUILT_IN_SAVEREGS
:
758 case BUILT_IN_NEXT_ARG
:
759 va_list_escapes
= true;
/* Strip the &ap (and &ap[0] for array-typed va_list) wrapper to get
   at the underlying VAR_DECL; anything unexpected means escape.  */
766 ap
= gimple_call_arg (stmt
, 0);
768 if (TREE_CODE (ap
) != ADDR_EXPR
)
770 va_list_escapes
= true;
773 ap
= TREE_OPERAND (ap
, 0);
774 if (TREE_CODE (ap
) == ARRAY_REF
)
776 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
778 va_list_escapes
= true;
781 ap
= TREE_OPERAND (ap
, 0);
783 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
784 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (fun
->decl
))
785 || TREE_CODE (ap
) != VAR_DECL
)
787 va_list_escapes
= true;
791 if (is_global_var (ap
))
793 va_list_escapes
= true;
/* Track this va_list decl; VAR_DECLs are keyed past the SSA-name
   space so one bitmap can hold both kinds of keys.  */
797 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
) + num_ssa_names
);
799 /* VA_START_BB and VA_START_AP will be only used if there is just
800 one va_start in the function. */
809 /* If there were no va_start uses in the function, there is no need to
811 if (si
.va_start_count
== 0)
814 /* If some va_list arguments weren't local, we can't optimize. */
818 /* For void * or char * va_list, something useful can be done only
819 if there is just one va_start. */
820 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
822 va_list_escapes
= true;
826 /* For struct * va_list, if the backend didn't tell us what the counter fields
827 are, there is nothing more we can do. */
828 if (!va_list_simple_ptr
829 && va_list_gpr_counter_field
== NULL_TREE
830 && va_list_fpr_counter_field
== NULL_TREE
)
832 va_list_escapes
= true;
836 /* For void * or char * va_list there is just one counter
837 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
838 if (va_list_simple_ptr
)
839 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
/* Pass 2 needs dominance info (for reachable_at_most_once) and a
   walk_stmt_info carrying the tracked-variables bitmap.  */
841 calculate_dominance_info (CDI_DOMINATORS
);
842 memset (&wi
, 0, sizeof (wi
));
843 wi
.info
= si
.va_list_vars
;
845 FOR_EACH_BB_FN (bb
, fun
)
847 gimple_stmt_iterator i
;
/* -1 = "not yet decided for this bb"; resolved lazily by the helpers.  */
849 si
.compute_sizes
= -1;
852 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
853 them as assignments for the purpose of escape analysis. This is
854 not needed for non-simple va_list because virtual phis don't perform
855 any real data movement. */
856 if (va_list_simple_ptr
)
862 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
864 gimple phi
= gsi_stmt (i
);
865 lhs
= PHI_RESULT (phi
);
867 if (virtual_operand_p (lhs
))
870 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
872 rhs
= USE_FROM_PTR (uop
);
873 if (va_list_ptr_read (&si
, rhs
, lhs
))
875 else if (va_list_ptr_write (&si
, lhs
, rhs
))
878 check_va_list_escapes (&si
, lhs
, rhs
);
880 if (si
.va_list_escapes
)
882 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
884 fputs ("va_list escapes in ", dump_file
);
885 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
886 fputc ('\n', dump_file
);
888 va_list_escapes
= true;
/* Scan ordinary statements, recognizing the known va_arg idioms and
   treating everything else that touches a va_list as an escape.  */
894 for (i
= gsi_start_bb (bb
);
895 !gsi_end_p (i
) && !va_list_escapes
;
898 gimple stmt
= gsi_stmt (i
);
900 /* Don't look at __builtin_va_{start,end}, they are ok. */
901 if (is_gimple_call (stmt
))
903 tree callee
= gimple_call_fndecl (stmt
);
906 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
907 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
908 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
912 if (is_gimple_assign (stmt
))
914 tree lhs
= gimple_assign_lhs (stmt
);
915 tree rhs
= gimple_assign_rhs1 (stmt
);
917 if (va_list_simple_ptr
)
919 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
920 == GIMPLE_SINGLE_RHS
)
922 /* Check for ap ={v} {}. */
923 if (TREE_CLOBBER_P (rhs
))
926 /* Check for tem = ap. */
927 else if (va_list_ptr_read (&si
, rhs
, lhs
))
930 /* Check for the last insn in:
935 else if (va_list_ptr_write (&si
, lhs
, rhs
))
939 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
940 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
941 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
942 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
943 == GIMPLE_SINGLE_RHS
))
944 check_va_list_escapes (&si
, lhs
, rhs
);
948 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
949 == GIMPLE_SINGLE_RHS
)
951 /* Check for ap ={v} {}. */
952 if (TREE_CLOBBER_P (rhs
))
955 /* Check for ap[0].field = temp. */
956 else if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
959 /* Check for temp = ap[0].field. */
960 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
965 /* Do any architecture specific checking. */
966 if (targetm
.stdarg_optimize_hook
967 && targetm
.stdarg_optimize_hook (&si
, stmt
))
971 else if (is_gimple_debug (stmt
))
974 /* All other uses of va_list are either va_copy (that is not handled
975 in this optimization), taking address of va_list variable or
976 passing va_list to other functions (in that case va_list might
977 escape the function and therefore va_start needs to set it up
978 fully), or some unexpected use of va_list. None of these should
979 happen in a gimplified VA_ARG_EXPR. */
980 if (si
.va_list_escapes
981 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
983 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
985 fputs ("va_list escapes in ", dump_file
);
986 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
987 fputc ('\n', dump_file
);
989 va_list_escapes
= true;
/* Final escape check over the tracked temporaries.  */
997 if (! va_list_escapes
998 && va_list_simple_ptr
999 && ! bitmap_empty_p (si
.va_list_escape_vars
)
1000 && check_all_va_list_escapes (&si
))
1001 va_list_escapes
= true;
/* On any escape, conservatively save the full register areas.  */
1004 if (va_list_escapes
)
1006 fun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
1007 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
1009 BITMAP_FREE (si
.va_list_vars
);
1010 BITMAP_FREE (si
.va_list_escape_vars
);
/* Dump the final decision (guard on dump_file is in a missing line).  */
1014 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
1015 funcname
, (int) va_list_escapes
);
1016 if (fun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
1017 fputs ("all", dump_file
);
1019 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
1020 fputs (" GPR units and ", dump_file
);
1021 if (fun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
1022 fputs ("all", dump_file
);
1024 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
1025 fputs (" FPR units.\n", dump_file
);
/* Factory hook used by the pass manager to instantiate the pass.  */
1033 make_pass_stdarg (gcc::context
*ctxt
)
1035 return new pass_stdarg (ctxt
);