/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
#include "tree-chkp.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used, the va_list variables don't escape
   the function, it is only necessary to save registers that will be used
   in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
50 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
51 is executed at most as many times as VA_START_BB. */
54 reachable_at_most_once (basic_block va_arg_bb
, basic_block va_start_bb
)
56 auto_vec
<edge
, 10> stack
;
61 if (va_arg_bb
== va_start_bb
)
64 if (! dominated_by_p (CDI_DOMINATORS
, va_arg_bb
, va_start_bb
))
67 auto_sbitmap
visited (last_basic_block_for_fn (cfun
));
68 bitmap_clear (visited
);
71 FOR_EACH_EDGE (e
, ei
, va_arg_bb
->preds
)
74 while (! stack
.is_empty ())
81 if (e
->flags
& EDGE_COMPLEX
)
87 if (src
== va_start_bb
)
90 /* va_arg_bb can be executed more times than va_start_bb. */
97 gcc_assert (src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
));
99 if (! bitmap_bit_p (visited
, src
->index
))
101 bitmap_set_bit (visited
, src
->index
);
102 FOR_EACH_EDGE (e
, ei
, src
->preds
)
111 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
112 return constant, otherwise return HOST_WIDE_INT_M1U.
113 GPR_P is true if this is GPR counter. */
115 static unsigned HOST_WIDE_INT
116 va_list_counter_bump (struct stdarg_info
*si
, tree counter
, tree rhs
,
121 unsigned HOST_WIDE_INT ret
= 0, val
, counter_val
;
122 unsigned int max_size
;
124 if (si
->offsets
== NULL
)
128 si
->offsets
= XNEWVEC (int, num_ssa_names
);
129 for (i
= 0; i
< num_ssa_names
; ++i
)
133 counter_val
= gpr_p
? cfun
->va_list_gpr_size
: cfun
->va_list_fpr_size
;
134 max_size
= gpr_p
? VA_LIST_MAX_GPR_SIZE
: VA_LIST_MAX_FPR_SIZE
;
135 orig_lhs
= lhs
= rhs
;
138 enum tree_code rhs_code
;
141 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
143 if (counter_val
>= max_size
)
149 ret
-= counter_val
- si
->offsets
[SSA_NAME_VERSION (lhs
)];
153 stmt
= SSA_NAME_DEF_STMT (lhs
);
155 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != lhs
)
156 return HOST_WIDE_INT_M1U
;
158 rhs_code
= gimple_assign_rhs_code (stmt
);
159 rhs1
= gimple_assign_rhs1 (stmt
);
160 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
161 || gimple_assign_cast_p (stmt
))
162 && TREE_CODE (rhs1
) == SSA_NAME
)
168 if ((rhs_code
== POINTER_PLUS_EXPR
169 || rhs_code
== PLUS_EXPR
)
170 && TREE_CODE (rhs1
) == SSA_NAME
171 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
173 ret
+= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
178 if (rhs_code
== ADDR_EXPR
179 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
180 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
181 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
183 ret
+= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
184 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
188 if (get_gimple_rhs_class (rhs_code
) != GIMPLE_SINGLE_RHS
)
189 return HOST_WIDE_INT_M1U
;
191 rhs
= gimple_assign_rhs1 (stmt
);
192 if (TREE_CODE (counter
) != TREE_CODE (rhs
))
193 return HOST_WIDE_INT_M1U
;
195 if (TREE_CODE (counter
) == COMPONENT_REF
)
197 if (get_base_address (counter
) != get_base_address (rhs
)
198 || TREE_CODE (TREE_OPERAND (rhs
, 1)) != FIELD_DECL
199 || TREE_OPERAND (counter
, 1) != TREE_OPERAND (rhs
, 1))
200 return HOST_WIDE_INT_M1U
;
202 else if (counter
!= rhs
)
203 return HOST_WIDE_INT_M1U
;
209 val
= ret
+ counter_val
;
212 enum tree_code rhs_code
;
215 if (si
->offsets
[SSA_NAME_VERSION (lhs
)] != -1)
219 si
->offsets
[SSA_NAME_VERSION (lhs
)] = max_size
;
221 si
->offsets
[SSA_NAME_VERSION (lhs
)] = val
;
223 stmt
= SSA_NAME_DEF_STMT (lhs
);
225 rhs_code
= gimple_assign_rhs_code (stmt
);
226 rhs1
= gimple_assign_rhs1 (stmt
);
227 if ((get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
228 || gimple_assign_cast_p (stmt
))
229 && TREE_CODE (rhs1
) == SSA_NAME
)
235 if ((rhs_code
== POINTER_PLUS_EXPR
236 || rhs_code
== PLUS_EXPR
)
237 && TREE_CODE (rhs1
) == SSA_NAME
238 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt
)))
240 val
-= tree_to_uhwi (gimple_assign_rhs2 (stmt
));
245 if (rhs_code
== ADDR_EXPR
246 && TREE_CODE (TREE_OPERAND (rhs1
, 0)) == MEM_REF
247 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0)) == SSA_NAME
248 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1)))
250 val
-= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 1));
251 lhs
= TREE_OPERAND (TREE_OPERAND (rhs1
, 0), 0);
262 /* Called by walk_tree to look for references to va_list variables. */
265 find_va_list_reference (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
268 bitmap va_list_vars
= (bitmap
) ((struct walk_stmt_info
*) data
)->info
;
271 if (TREE_CODE (var
) == SSA_NAME
)
273 if (bitmap_bit_p (va_list_vars
, SSA_NAME_VERSION (var
)))
276 else if (VAR_P (var
))
278 if (bitmap_bit_p (va_list_vars
, DECL_UID (var
) + num_ssa_names
))
286 /* Helper function of va_list_counter_struct_op. Compute
287 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
288 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
289 statement. GPR_P is true if AP is a GPR counter, false if it is
293 va_list_counter_op (struct stdarg_info
*si
, tree ap
, tree var
, bool gpr_p
,
296 unsigned HOST_WIDE_INT increment
;
298 if (si
->compute_sizes
< 0)
300 si
->compute_sizes
= 0;
301 if (si
->va_start_count
== 1
302 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
303 si
->compute_sizes
= 1;
305 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
307 "bb%d will %sbe executed at most once for each va_start "
308 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
309 si
->va_start_bb
->index
);
314 && (increment
= va_list_counter_bump (si
, ap
, var
, gpr_p
)) + 1 > 1)
316 if (gpr_p
&& cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
318 cfun
->va_list_gpr_size
+= increment
;
322 if (!gpr_p
&& cfun
->va_list_fpr_size
+ increment
< VA_LIST_MAX_FPR_SIZE
)
324 cfun
->va_list_fpr_size
+= increment
;
329 if (write_p
|| !si
->compute_sizes
)
332 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
334 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
339 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
340 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
341 is false, AP has been seen in VAR = AP assignment.
342 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
343 va_arg operation that doesn't cause the va_list variable to escape
347 va_list_counter_struct_op (struct stdarg_info
*si
, tree ap
, tree var
,
352 if (TREE_CODE (ap
) != COMPONENT_REF
353 || TREE_CODE (TREE_OPERAND (ap
, 1)) != FIELD_DECL
)
356 if (TREE_CODE (var
) != SSA_NAME
357 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (var
)))
360 base
= get_base_address (ap
);
362 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (base
) + num_ssa_names
))
365 if (TREE_OPERAND (ap
, 1) == va_list_gpr_counter_field
)
366 va_list_counter_op (si
, ap
, var
, true, write_p
);
367 else if (TREE_OPERAND (ap
, 1) == va_list_fpr_counter_field
)
368 va_list_counter_op (si
, ap
, var
, false, write_p
);
374 /* Check for TEM = AP. Return true if found and the caller shouldn't
375 search for va_list references in the statement. */
378 va_list_ptr_read (struct stdarg_info
*si
, tree ap
, tree tem
)
381 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
384 if (TREE_CODE (tem
) != SSA_NAME
385 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem
)))
388 if (si
->compute_sizes
< 0)
390 si
->compute_sizes
= 0;
391 if (si
->va_start_count
== 1
392 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
393 si
->compute_sizes
= 1;
395 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
397 "bb%d will %sbe executed at most once for each va_start "
398 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
399 si
->va_start_bb
->index
);
402 /* For void * or char * va_list types, there is just one counter.
403 If va_arg is used in a loop, we don't know how many registers need
405 if (! si
->compute_sizes
)
408 if (va_list_counter_bump (si
, ap
, tem
, true) == HOST_WIDE_INT_M1U
)
411 /* Note the temporary, as we need to track whether it doesn't escape
412 the current function. */
413 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (tem
));
423 sequence and update cfun->va_list_gpr_size. Return true if found. */
426 va_list_ptr_write (struct stdarg_info
*si
, tree ap
, tree tem2
)
428 unsigned HOST_WIDE_INT increment
;
431 || !bitmap_bit_p (si
->va_list_vars
, DECL_UID (ap
) + num_ssa_names
))
434 if (TREE_CODE (tem2
) != SSA_NAME
435 || bitmap_bit_p (si
->va_list_vars
, SSA_NAME_VERSION (tem2
)))
438 if (si
->compute_sizes
<= 0)
441 increment
= va_list_counter_bump (si
, ap
, tem2
, true);
442 if (increment
+ 1 <= 1)
445 if (cfun
->va_list_gpr_size
+ increment
< VA_LIST_MAX_GPR_SIZE
)
446 cfun
->va_list_gpr_size
+= increment
;
448 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
454 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
455 containing value of some va_list variable plus optionally some constant,
456 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
457 depending whether LHS is a function local temporary. */
460 check_va_list_escapes (struct stdarg_info
*si
, tree lhs
, tree rhs
)
462 if (! POINTER_TYPE_P (TREE_TYPE (rhs
)))
465 if (TREE_CODE (rhs
) == SSA_NAME
)
467 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (rhs
)))
470 else if (TREE_CODE (rhs
) == ADDR_EXPR
471 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)) == SSA_NAME
)
474 tree ptr
= TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0);
475 if (! bitmap_bit_p (si
->va_list_escape_vars
, SSA_NAME_VERSION (ptr
)))
481 if (TREE_CODE (lhs
) != SSA_NAME
)
483 si
->va_list_escapes
= true;
487 if (si
->compute_sizes
< 0)
489 si
->compute_sizes
= 0;
490 if (si
->va_start_count
== 1
491 && reachable_at_most_once (si
->bb
, si
->va_start_bb
))
492 si
->compute_sizes
= 1;
494 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
496 "bb%d will %sbe executed at most once for each va_start "
497 "in bb%d\n", si
->bb
->index
, si
->compute_sizes
? "" : "not ",
498 si
->va_start_bb
->index
);
501 /* For void * or char * va_list types, there is just one counter.
502 If va_arg is used in a loop, we don't know how many registers need
504 if (! si
->compute_sizes
)
506 si
->va_list_escapes
= true;
510 if (va_list_counter_bump (si
, si
->va_start_ap
, lhs
, true)
511 == HOST_WIDE_INT_M1U
)
513 si
->va_list_escapes
= true;
517 bitmap_set_bit (si
->va_list_escape_vars
, SSA_NAME_VERSION (lhs
));
521 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
522 Return true if va_list might be escaping. */
525 check_all_va_list_escapes (struct stdarg_info
*si
)
529 FOR_EACH_BB_FN (bb
, cfun
)
531 for (gphi_iterator i
= gsi_start_phis (bb
); !gsi_end_p (i
);
537 gphi
*phi
= i
.phi ();
539 lhs
= PHI_RESULT (phi
);
540 if (virtual_operand_p (lhs
)
541 || bitmap_bit_p (si
->va_list_escape_vars
,
542 SSA_NAME_VERSION (lhs
)))
545 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
547 tree rhs
= USE_FROM_PTR (uop
);
548 if (TREE_CODE (rhs
) == SSA_NAME
549 && bitmap_bit_p (si
->va_list_escape_vars
,
550 SSA_NAME_VERSION (rhs
)))
552 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
554 fputs ("va_list escapes in ", dump_file
);
555 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
556 fputc ('\n', dump_file
);
563 for (gimple_stmt_iterator i
= gsi_start_bb (bb
); !gsi_end_p (i
);
566 gimple
*stmt
= gsi_stmt (i
);
570 if (is_gimple_debug (stmt
))
573 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_ALL_USES
)
575 if (! bitmap_bit_p (si
->va_list_escape_vars
,
576 SSA_NAME_VERSION (use
)))
579 if (is_gimple_assign (stmt
))
581 tree rhs
= gimple_assign_rhs1 (stmt
);
582 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
585 if (rhs_code
== MEM_REF
586 && TREE_OPERAND (rhs
, 0) == use
587 && TYPE_SIZE_UNIT (TREE_TYPE (rhs
))
588 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs
)))
589 && si
->offsets
[SSA_NAME_VERSION (use
)] != -1)
591 unsigned HOST_WIDE_INT gpr_size
;
592 tree access_size
= TYPE_SIZE_UNIT (TREE_TYPE (rhs
));
594 gpr_size
= si
->offsets
[SSA_NAME_VERSION (use
)]
595 + tree_to_shwi (TREE_OPERAND (rhs
, 1))
596 + tree_to_uhwi (access_size
);
597 if (gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
598 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
599 else if (gpr_size
> cfun
->va_list_gpr_size
)
600 cfun
->va_list_gpr_size
= gpr_size
;
604 /* va_arg sequences may contain
605 other_ap_temp = ap_temp;
606 other_ap_temp = ap_temp + constant;
607 other_ap_temp = (some_type *) ap_temp;
611 && ((rhs_code
== POINTER_PLUS_EXPR
612 && (TREE_CODE (gimple_assign_rhs2 (stmt
))
614 || gimple_assign_cast_p (stmt
)
615 || (get_gimple_rhs_class (rhs_code
)
616 == GIMPLE_SINGLE_RHS
)))
618 tree lhs
= gimple_assign_lhs (stmt
);
620 if (TREE_CODE (lhs
) == SSA_NAME
621 && bitmap_bit_p (si
->va_list_escape_vars
,
622 SSA_NAME_VERSION (lhs
)))
626 && bitmap_bit_p (si
->va_list_vars
,
627 DECL_UID (lhs
) + num_ssa_names
))
630 else if (rhs_code
== ADDR_EXPR
631 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
632 && TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0) == use
)
634 tree lhs
= gimple_assign_lhs (stmt
);
636 if (bitmap_bit_p (si
->va_list_escape_vars
,
637 SSA_NAME_VERSION (lhs
)))
642 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
644 fputs ("va_list escapes in ", dump_file
);
645 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
646 fputc ('\n', dump_file
);
656 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
659 optimize_va_list_gpr_fpr_size (function
*fun
)
662 bool va_list_escapes
= false;
663 bool va_list_simple_ptr
;
664 struct stdarg_info si
;
665 struct walk_stmt_info wi
;
666 const char *funcname
= NULL
;
669 fun
->va_list_gpr_size
= 0;
670 fun
->va_list_fpr_size
= 0;
671 memset (&si
, 0, sizeof (si
));
672 si
.va_list_vars
= BITMAP_ALLOC (NULL
);
673 si
.va_list_escape_vars
= BITMAP_ALLOC (NULL
);
676 funcname
= lang_hooks
.decl_printable_name (current_function_decl
, 2);
678 cfun_va_list
= targetm
.fn_abi_va_list (fun
->decl
);
679 va_list_simple_ptr
= POINTER_TYPE_P (cfun_va_list
)
680 && (TREE_TYPE (cfun_va_list
) == void_type_node
681 || TREE_TYPE (cfun_va_list
) == char_type_node
);
682 gcc_assert (is_gimple_reg_type (cfun_va_list
) == va_list_simple_ptr
);
684 FOR_EACH_BB_FN (bb
, fun
)
686 gimple_stmt_iterator i
;
688 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
690 gimple
*stmt
= gsi_stmt (i
);
693 if (!is_gimple_call (stmt
))
696 callee
= gimple_call_fndecl (stmt
);
698 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
701 switch (DECL_FUNCTION_CODE (callee
))
703 case BUILT_IN_VA_START
:
705 /* If old style builtins are used, don't optimize anything. */
706 case BUILT_IN_SAVEREGS
:
707 case BUILT_IN_NEXT_ARG
:
708 va_list_escapes
= true;
715 ap
= gimple_call_arg (stmt
, 0);
717 if (TREE_CODE (ap
) != ADDR_EXPR
)
719 va_list_escapes
= true;
722 ap
= TREE_OPERAND (ap
, 0);
723 if (TREE_CODE (ap
) == ARRAY_REF
)
725 if (! integer_zerop (TREE_OPERAND (ap
, 1)))
727 va_list_escapes
= true;
730 ap
= TREE_OPERAND (ap
, 0);
732 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap
))
733 != TYPE_MAIN_VARIANT (targetm
.fn_abi_va_list (fun
->decl
))
736 va_list_escapes
= true;
740 if (is_global_var (ap
))
742 va_list_escapes
= true;
746 bitmap_set_bit (si
.va_list_vars
, DECL_UID (ap
) + num_ssa_names
);
748 /* VA_START_BB and VA_START_AP will be only used if there is just
749 one va_start in the function. */
758 /* If there were no va_start uses in the function, there is no need to
760 if (si
.va_start_count
== 0)
763 /* If some va_list arguments weren't local, we can't optimize. */
767 /* For void * or char * va_list, something useful can be done only
768 if there is just one va_start. */
769 if (va_list_simple_ptr
&& si
.va_start_count
> 1)
771 va_list_escapes
= true;
775 /* For struct * va_list, if the backend didn't tell us what the counter fields
776 are, there is nothing more we can do. */
777 if (!va_list_simple_ptr
778 && va_list_gpr_counter_field
== NULL_TREE
779 && va_list_fpr_counter_field
== NULL_TREE
)
781 va_list_escapes
= true;
785 /* For void * or char * va_list there is just one counter
786 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
787 if (va_list_simple_ptr
)
788 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
790 calculate_dominance_info (CDI_DOMINATORS
);
791 memset (&wi
, 0, sizeof (wi
));
792 wi
.info
= si
.va_list_vars
;
794 FOR_EACH_BB_FN (bb
, fun
)
796 si
.compute_sizes
= -1;
799 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
800 them as assignments for the purpose of escape analysis. This is
801 not needed for non-simple va_list because virtual phis don't perform
802 any real data movement. Also, check PHI nodes for taking address of
808 for (gphi_iterator i
= gsi_start_phis (bb
); !gsi_end_p (i
);
811 gphi
*phi
= i
.phi ();
812 lhs
= PHI_RESULT (phi
);
814 if (virtual_operand_p (lhs
))
817 if (va_list_simple_ptr
)
819 FOR_EACH_PHI_ARG (uop
, phi
, soi
, SSA_OP_USE
)
821 rhs
= USE_FROM_PTR (uop
);
822 if (va_list_ptr_read (&si
, rhs
, lhs
))
824 else if (va_list_ptr_write (&si
, lhs
, rhs
))
827 check_va_list_escapes (&si
, lhs
, rhs
);
829 if (si
.va_list_escapes
)
831 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
833 fputs ("va_list escapes in ", dump_file
);
834 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
835 fputc ('\n', dump_file
);
837 va_list_escapes
= true;
842 for (unsigned j
= 0; !va_list_escapes
843 && j
< gimple_phi_num_args (phi
); ++j
)
844 if ((!va_list_simple_ptr
845 || TREE_CODE (gimple_phi_arg_def (phi
, j
)) != SSA_NAME
)
846 && walk_tree (gimple_phi_arg_def_ptr (phi
, j
),
847 find_va_list_reference
, &wi
, NULL
))
849 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
851 fputs ("va_list escapes in ", dump_file
);
852 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
853 fputc ('\n', dump_file
);
855 va_list_escapes
= true;
859 for (gimple_stmt_iterator i
= gsi_start_bb (bb
);
860 !gsi_end_p (i
) && !va_list_escapes
;
863 gimple
*stmt
= gsi_stmt (i
);
865 /* Don't look at __builtin_va_{start,end}, they are ok. */
866 if (is_gimple_call (stmt
))
868 tree callee
= gimple_call_fndecl (stmt
);
871 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
872 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_START
873 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
))
877 if (is_gimple_assign (stmt
))
879 lhs
= gimple_assign_lhs (stmt
);
880 rhs
= gimple_assign_rhs1 (stmt
);
882 if (va_list_simple_ptr
)
884 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
885 == GIMPLE_SINGLE_RHS
)
887 /* Check for ap ={v} {}. */
888 if (TREE_CLOBBER_P (rhs
))
891 /* Check for tem = ap. */
892 else if (va_list_ptr_read (&si
, rhs
, lhs
))
895 /* Check for the last insn in:
900 else if (va_list_ptr_write (&si
, lhs
, rhs
))
904 if ((gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
905 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
906 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
907 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
908 == GIMPLE_SINGLE_RHS
))
909 check_va_list_escapes (&si
, lhs
, rhs
);
913 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
914 == GIMPLE_SINGLE_RHS
)
916 /* Check for ap ={v} {}. */
917 if (TREE_CLOBBER_P (rhs
))
920 /* Check for ap[0].field = temp. */
921 else if (va_list_counter_struct_op (&si
, lhs
, rhs
, true))
924 /* Check for temp = ap[0].field. */
925 else if (va_list_counter_struct_op (&si
, rhs
, lhs
,
930 /* Do any architecture specific checking. */
931 if (targetm
.stdarg_optimize_hook
932 && targetm
.stdarg_optimize_hook (&si
, stmt
))
936 else if (is_gimple_debug (stmt
))
939 /* All other uses of va_list are either va_copy (that is not handled
940 in this optimization), taking address of va_list variable or
941 passing va_list to other functions (in that case va_list might
942 escape the function and therefore va_start needs to set it up
943 fully), or some unexpected use of va_list. None of these should
944 happen in a gimplified VA_ARG_EXPR. */
945 if (si
.va_list_escapes
946 || walk_gimple_op (stmt
, find_va_list_reference
, &wi
))
948 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
950 fputs ("va_list escapes in ", dump_file
);
951 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
952 fputc ('\n', dump_file
);
954 va_list_escapes
= true;
962 if (! va_list_escapes
963 && va_list_simple_ptr
964 && ! bitmap_empty_p (si
.va_list_escape_vars
)
965 && check_all_va_list_escapes (&si
))
966 va_list_escapes
= true;
971 fun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
972 fun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
974 BITMAP_FREE (si
.va_list_vars
);
975 BITMAP_FREE (si
.va_list_escape_vars
);
979 fprintf (dump_file
, "%s: va_list escapes %d, needs to save ",
980 funcname
, (int) va_list_escapes
);
981 if (fun
->va_list_gpr_size
>= VA_LIST_MAX_GPR_SIZE
)
982 fputs ("all", dump_file
);
984 fprintf (dump_file
, "%d", cfun
->va_list_gpr_size
);
985 fputs (" GPR units and ", dump_file
);
986 if (fun
->va_list_fpr_size
>= VA_LIST_MAX_FPR_SIZE
)
987 fputs ("all", dump_file
);
989 fprintf (dump_file
, "%d", cfun
->va_list_fpr_size
);
990 fputs (" FPR units.\n", dump_file
);
994 /* Expand IFN_VA_ARGs in FUN. */
997 expand_ifn_va_arg_1 (function
*fun
)
999 bool modified
= false;
1001 gimple_stmt_iterator i
;
1002 location_t saved_location
;
1004 FOR_EACH_BB_FN (bb
, fun
)
1005 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1007 gimple
*stmt
= gsi_stmt (i
);
1008 tree ap
, aptype
, expr
, lhs
, type
;
1009 gimple_seq pre
= NULL
, post
= NULL
;
1011 if (!gimple_call_internal_p (stmt
, IFN_VA_ARG
))
1016 type
= TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt
, 1)));
1017 ap
= gimple_call_arg (stmt
, 0);
1018 aptype
= TREE_TYPE (gimple_call_arg (stmt
, 2));
1019 gcc_assert (POINTER_TYPE_P (aptype
));
1021 /* Balanced out the &ap, usually added by build_va_arg. */
1022 ap
= build2 (MEM_REF
, TREE_TYPE (aptype
), ap
,
1023 build_int_cst (aptype
, 0));
1025 push_gimplify_context (false);
1026 saved_location
= input_location
;
1027 input_location
= gimple_location (stmt
);
1029 /* Make it easier for the backends by protecting the valist argument
1030 from multiple evaluations. */
1031 gimplify_expr (&ap
, &pre
, &post
, is_gimple_min_lval
, fb_lvalue
);
1033 expr
= targetm
.gimplify_va_arg_expr (ap
, type
, &pre
, &post
);
1035 lhs
= gimple_call_lhs (stmt
);
1036 if (lhs
!= NULL_TREE
)
1038 unsigned int nargs
= gimple_call_num_args (stmt
);
1039 gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs
), type
));
1041 /* We replace call with a new expr. This may require
1042 corresponding bndret call fixup. */
1043 if (chkp_function_instrumented_p (fun
->decl
))
1044 chkp_fixup_inlined_call (lhs
, expr
);
1048 /* We've transported the size of with WITH_SIZE_EXPR here as
1049 the last argument of the internal fn call. Now reinstate
1051 tree size
= gimple_call_arg (stmt
, nargs
- 1);
1052 expr
= build2 (WITH_SIZE_EXPR
, TREE_TYPE (expr
), expr
, size
);
1055 /* We use gimplify_assign here, rather than gimple_build_assign,
1056 because gimple_assign knows how to deal with variable-sized
1058 gimplify_assign (lhs
, expr
, &pre
);
1061 gimplify_expr (&expr
, &pre
, &post
, is_gimple_lvalue
, fb_lvalue
);
1063 input_location
= saved_location
;
1064 pop_gimplify_context (NULL
);
1066 gimple_seq_add_seq (&pre
, post
);
1067 update_modified_stmts (pre
);
1069 /* Add the sequence after IFN_VA_ARG. This splits the bb right
1070 after IFN_VA_ARG, and adds the sequence in one or more new bbs
1072 gimple_find_sub_bbs (pre
, &i
);
1074 /* Remove the IFN_VA_ARG gimple_call. It's the last stmt in the
1076 unlink_stmt_vdef (stmt
);
1077 release_ssa_name_fn (fun
, gimple_vdef (stmt
));
1078 gsi_remove (&i
, true);
1079 gcc_assert (gsi_end_p (i
));
1081 /* We're walking here into the bbs which contain the expansion of
1082 IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
1083 expanding. We could try to skip walking these bbs, perhaps by
1084 walking backwards over gimples and bbs. */
1091 free_dominance_info (CDI_DOMINATORS
);
1092 update_ssa (TODO_update_ssa
);
1095 /* Expand IFN_VA_ARGs in FUN, if necessary. */
1098 expand_ifn_va_arg (function
*fun
)
1100 if ((fun
->curr_properties
& PROP_gimple_lva
) == 0)
1101 expand_ifn_va_arg_1 (fun
);
1106 gimple_stmt_iterator i
;
1107 FOR_EACH_BB_FN (bb
, fun
)
1108 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1109 gcc_assert (!gimple_call_internal_p (gsi_stmt (i
), IFN_VA_ARG
));
1115 const pass_data pass_data_stdarg
=
1117 GIMPLE_PASS
, /* type */
1118 "stdarg", /* name */
1119 OPTGROUP_NONE
, /* optinfo_flags */
1120 TV_NONE
, /* tv_id */
1121 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1122 PROP_gimple_lva
, /* properties_provided */
1123 0, /* properties_destroyed */
1124 0, /* todo_flags_start */
1125 0, /* todo_flags_finish */
1128 class pass_stdarg
: public gimple_opt_pass
1131 pass_stdarg (gcc::context
*ctxt
)
1132 : gimple_opt_pass (pass_data_stdarg
, ctxt
)
1135 /* opt_pass methods: */
1136 virtual bool gate (function
*)
1138 /* Always run this pass, in order to expand va_arg internal_fns. We
1139 also need to do that if fun->stdarg == 0, because a va_arg may also
1140 occur in a function without varargs, f.i. if when passing a va_list to
1141 another function. */
1145 virtual unsigned int execute (function
*);
1147 }; // class pass_stdarg
1150 pass_stdarg::execute (function
*fun
)
1152 /* TODO: Postpone expand_ifn_va_arg till after
1153 optimize_va_list_gpr_fpr_size. */
1154 expand_ifn_va_arg (fun
);
1157 /* This optimization is only for stdarg functions. */
1158 && fun
->stdarg
!= 0)
1159 optimize_va_list_gpr_fpr_size (fun
);
1167 make_pass_stdarg (gcc::context
*ctxt
)
1169 return new pass_stdarg (ctxt
);
1174 const pass_data pass_data_lower_vaarg
=
1176 GIMPLE_PASS
, /* type */
1177 "lower_vaarg", /* name */
1178 OPTGROUP_NONE
, /* optinfo_flags */
1179 TV_NONE
, /* tv_id */
1180 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1181 PROP_gimple_lva
, /* properties_provided */
1182 0, /* properties_destroyed */
1183 0, /* todo_flags_start */
1184 0, /* todo_flags_finish */
1187 class pass_lower_vaarg
: public gimple_opt_pass
1190 pass_lower_vaarg (gcc::context
*ctxt
)
1191 : gimple_opt_pass (pass_data_lower_vaarg
, ctxt
)
1194 /* opt_pass methods: */
1195 virtual bool gate (function
*)
1197 return (cfun
->curr_properties
& PROP_gimple_lva
) == 0;
1200 virtual unsigned int execute (function
*);
1202 }; // class pass_lower_vaarg
1205 pass_lower_vaarg::execute (function
*fun
)
1207 expand_ifn_va_arg (fun
);
1214 make_pass_lower_vaarg (gcc::context
*ctxt
)
1216 return new pass_lower_vaarg (ctxt
);