/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
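
/* As an illustrative sketch (this example is not part of the original
   sources): for a function such as

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
	 total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   va_arg is only used with an integral type, so on a target like x86_64
   the prologue only needs to spill the general purpose argument
   registers, not the floating point ones.  */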

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
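
/* An illustrative sketch (the SSA names below are hypothetical, not
   from this file): given GIMPLE like

     tmp_1 = ap.gp_offset;
     tmp_2 = tmp_1 + 8;
     ap.gp_offset = tmp_2;

   a call va_list_counter_bump (si, <ap.gp_offset>, tmp_2, true) walks
   the SSA definition chain from tmp_2 back to the read of the counter
   and returns the accumulated constant 8.  */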

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */
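
/* An illustrative sketch (field and SSA names are hypothetical): on a
   target whose va_list tracks a GPR save area offset, va_arg (ap, int)
   expands to something like

     tmp_1 = ap.gp_offset;	<- VAR = AP, write_p == false
     tmp_2 = tmp_1 + 8;
     ap.gp_offset = tmp_2;	<- AP = VAR, write_p == true

   and on the write the bump of 8 computed by va_list_counter_bump is
   added to cfun->va_list_gpr_size.  */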

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
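
/* An illustrative note (not from the original sources): on targets
   where va_list is a one-element array of records, AP is a
   COMPONENT_REF such as ap[0].gp_offset, whose base must be the local
   va_list variable already recorded in si->va_list_vars and whose
   FIELD_DECL must match one of the two counter fields exported by the
   backend.  */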

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
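
/* An illustrative sketch for a simple pointer va_list (the SSA names
   are hypothetical): va_arg (ap, int) is gimplified roughly as

     tem_1 = ap;
     tem_2 = tem_1 + 4;
     ap = tem_2;
     x_3 = MEM[(int *) tem_1];

   The first statement is the TEM = AP read recognized below; TEM is
   then tracked in si->va_list_escape_vars.  */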

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function local temporary.  */
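
/* An illustrative sketch (names are hypothetical): with tem_1 already
   in si->va_list_escape_vars,

     p_2 = (char *) tem_1;

   adds p_2 to si->va_list_escape_vars, whereas an assignment to a
   non-SSA destination such as

     global_ptr = tem_1;

   sets si->va_list_escapes.  */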

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
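
/* An illustrative sketch (names are hypothetical): a use such as

     foo (tem_1);

   where tem_1 carries a va_list value matches none of the recognized
   va_arg patterns below, so it is reported as an escape and the caller
   falls back to saving all registers.  */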

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     apply constraints on their use.  */
                  if (TREE_CODE (rhs) == SSA_NAME
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
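
/* An illustrative sketch (exact unit counts are target dependent): for
   the sum() example near the top of this file, a -fdump-tree-stdarg
   dump produced by the code below might read

     sum: va_list escapes 0, needs to save all GPR units and 0 FPR units.

   because va_arg appears inside a loop, so the GPR counter bump cannot
   be bounded, while no FP type is ever fetched.  */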

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
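
/* An illustrative sketch (the SSA name is hypothetical): a C statement
   "x = va_arg (ap, double);" arrives at this pass as an internal call
   such as

     x_1 = VA_ARG (&ap, 0B);

   whose second argument exists only to carry the type (double *) and
   is replaced below by the target's expansion of the va_arg sequence.  */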

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple stmt = gsi_stmt (i);
        tree ap, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build_fold_indirect_ref (ap);

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            if (nargs == 3)
              {
                /* We've transported the size with WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
        break;
      }

  if (modified)
    {
      free_dominance_info (CDI_DOMINATORS);
      update_ssa (TODO_update_ssa);
    }
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when passing a va_list to
         another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}