/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-into-ssa.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save the registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
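/* For illustration (an added example, not part of the original source): a
   function such as

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   only ever fetches integral arguments, so on targets with separate GPR and
   FPR register save areas the expanders can skip setting up the floating
   point save area entirely.  */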
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
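/* For illustration (an added sketch, not part of the original source): on a
   target whose va_list records a GPR byte counter, the lowered
   va_arg (ap, int) sequence contains statements along the lines of

     gpr_1 = ap.gpr;
     gpr_2 = gpr_1 + 8;
     ap.gpr = gpr_2;

   For the final statement this function walks the SSA definition chain of
   gpr_2 back to the read of the counter and returns the accumulated
   constant, 8 in this case.  */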
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      break;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */
static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (! write_p
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        cfun->va_list_gpr_size += increment;

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        cfun->va_list_fpr_size += increment;
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
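/* For illustration (an added sketch, not part of the original source): with
   a plain "char *" or "void *" va_list, a lowered va_arg (ap, int) use is
   roughly

     tem = ap;           (recognized here, in va_list_ptr_read)
     tem2 = tem + 8;     (pointer bump by the argument slot size)
     ap = tem2;          (recognized in va_list_ptr_write below)

   so the pass only has to record that up to 8 more bytes of the register
   save area may be read.  */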
static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */
static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function local temporary.  */
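/* For illustration (an added sketch, not part of the original source):

     tem = ap;        tem is already in si->va_list_escape_vars
     p_1 = tem + 4;   p_1 is an SSA temporary: add it to the bitmap
     global = tem;    the value reaches a non-SSA name: set va_list_escapes

   Anything derived from a tracked temporary is either tracked in turn or,
   if it leaves the function-local SSA temporaries, treated as a full
   escape.  */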
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
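/* For illustration (an added, hedged example, not part of the original
   source): in a function whose only uses of the va_list are two
   va_arg (ap, int) reads, this analysis leaves fun->va_list_fpr_size at 0
   and raises fun->va_list_gpr_size just enough to cover two GPR save slots,
   so the prologue expanders can avoid spilling any FP argument registers
   and spill at most two GP argument registers.  */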
static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */
static bool
gimple_call_ifn_va_arg_p (gimple *stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
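/* For illustration (an added sketch, not part of the original source): after
   gimplification the IL contains an internal call roughly of the form

     x = VA_ARG (&ap, ...);

   for each va_arg use.  This routine asks targetm.gimplify_va_arg_expr for
   the target-specific pointer arithmetic and register-save-area loads and
   splices that sequence into the CFG in place of the internal call.  */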
static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        tree ap, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build_fold_indirect_ref (ap);

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (nargs == 3)
              {
                /* We've transported the size with WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimple_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        unlink_stmt_vdef (stmt);
        release_ssa_name_fn (fun, gimple_vdef (stmt));
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
      }

  if (modified)
    {
      free_dominance_info (CDI_DOMINATORS);
      update_ssa (TODO_update_ssa);
    }
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */
static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
}

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, e.g. when passing a va_list to
       another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}