/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
#include "tree-chkp.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
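
/* For illustration only (a sketch; this function is not part of the pass):
   in

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
	 total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   ap never escapes and va_arg is only used with an integral type, so a
   target that would normally spill all argument registers in the prologue
   only needs to spill the general purpose ones here.  */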

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
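
/* For illustration (a sketch of the kind of GIMPLE va_list_counter_bump
   below walks; the SSA names, the gp_offset field name and the constant
   are made up):

     tem_1 = ap.gp_offset;	<-- read of COUNTER
     tem_2 = tem_1 + 8;
     ap.gp_offset = tem_2;	<-- COUNTER = RHS, RHS is COUNTER + 8

   Starting from RHS, the chain of defining statements is followed back to
   the read of COUNTER, accumulating the constant 8, which is the value
   returned.  */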

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}
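
/* For illustration (assuming an x86_64-style va_list; the counter field
   names come from that ABI and are supplied by the backend through
   va_list_gpr_counter_field/va_list_fpr_counter_field, not hard-coded
   here): a va_arg (ap, int) expansion contains

     tem_1 = ap.gp_offset;	<-- VAR = AP, write_p == false
     ...
     ap.gp_offset = tem_2;	<-- AP = VAR, write_p == true

   and va_list_counter_op below turns the write back into a known
   increment of cfun->va_list_gpr_size via va_list_counter_bump.  */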

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
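
/* For illustration (a sketch, assuming a target whose va_list is a plain
   void * or char * pointer): a va_arg use is gimplified to roughly

     tem1 = ap;			<-- recognized by va_list_ptr_read
     tem2 = tem1 + CST;
     ap = tem2;			<-- recognized by va_list_ptr_write

   with the actual argument loaded through tem1 in between.  */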

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
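
/* For illustration (a sketch): given

     tem1 = ap;
     tem2 = tem1 + 8;		<-- tem2 joins si->va_list_escape_vars
     global_ptr = tem2;		<-- LHS is not an SSA name: escape

   the middle statement below only adds the new temporary to
   si->va_list_escape_vars, while the last one sets si->va_list_escapes.  */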

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
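
/* For illustration (a sketch): once the temporaries are collected,
   check_all_va_list_escapes below scans every use of them.  A dereference
   at a known offset, such as

     x = MEM[(int *)tem2];	<-- only bounds cfun->va_list_gpr_size

   is benign, whereas passing a tracked temporary to another function,
   storing it in a non-local object or feeding it into an unrecognized
   computation makes the function report an escape.  */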

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
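
/* For illustration (a sketch): in

     void
     log_it (const char *fmt, ...)
     {
       va_list ap;

       va_start (ap, fmt);
       vfprintf (stderr, fmt, ap);	<-- ap is passed to another function
       va_end (ap);
     }

   ap escapes, so optimize_va_list_gpr_fpr_size below must keep both sizes
   at their maxima and all argument registers get saved.  With
   -fdump-tree-stdarg-details this shows up as a dump line like

     log_it: va_list escapes 1, needs to save all GPR units and all FPR units.

   (the wording follows the fprintf formats at the end of the function).  */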

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple *stmt)
{
  return (is_gimple_call (stmt)
	  && gimple_call_internal_p (stmt)
	  && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}
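
/* For illustration (a sketch of the shape, not an exact dump): up to this
   point a va_arg read survives as an internal function call along the
   lines of

     x = VA_ARG (&ap, 0B);

   where the second argument merely carries the type being read (see the
   TREE_TYPE (TREE_TYPE (...)) extraction below).  expand_ifn_va_arg_1
   replaces each such call with whatever GIMPLE
   targetm.gimplify_va_arg_expr produces for the target's va_list.  */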

/* Expand IFN_VA_ARGs in FUN.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build_fold_indirect_ref (ap);

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    /* We replace the call with a new expr.  This may require
	       a corresponding bndret call fixup.  */
	    if (chkp_function_instrumented_p (fun->decl))
	      chkp_fixup_inlined_call (lhs, expr);

	    if (nargs == 3)
	      {
		/* We've transported the size from the WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inserted before the split point.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
      }

  if (modified)
    {
      free_dominance_info (CDI_DOMINATORS);
      update_ssa (TODO_update_ssa);
    }
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
	for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	  gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, e.g. when a va_list is passed
	 to another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}