/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "sbitmap.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
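
/* Illustrative example (hypothetical user code, assuming an x86_64-style
   ABI where integral varargs land in general-purpose registers): in the
   function below va_arg is only applied to 'int', so this pass lets the
   prologue skip saving the floating point argument registers:

     #include <stdarg.h>

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);   // integral va_arg only
       va_end (ap);
       return total;
     }
*/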
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
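
/* For instance (a hypothetical CFG): with va_start in a loop preheader and
   va_arg inside the loop body, the va_arg block can execute once per
   iteration, i.e. more often than the va_start block, so this predicate
   returns false and the counter bumps cannot be summed statically.  */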

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */
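
/* Illustrative (hypothetical) GIMPLE for an x86_64-style va_list, where
   each integral va_arg bumps the gp_offset counter by 8:

     tmp_1 = ap.gp_offset;
     ...
     tmp_2 = tmp_1 + 8;
     ap.gp_offset = tmp_2;

   For the final statement above, va_list_counter_bump (si, ap.gp_offset,
   tmp_2, true) would return 8.  */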

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
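
/* Illustrative (hypothetical) gimplified va_arg fragment on a target with
   a struct va_list, showing both directions this function recognizes:

     tmp_1 = ap[0].gp_offset;   // VAR = AP, WRITE_P == false
     ...
     ap[0].gp_offset = tmp_2;   // AP = VAR, WRITE_P == true
*/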

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
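
/* For a simple-pointer (void * or char *) va_list, a gimplified va_arg
   typically starts with a sequence like this (illustrative):

     tem_1 = ap;
     tem_2 = tem_1 + 8;
     ap = tem_2;

   This function recognizes the first statement of such a sequence.  */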

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function local temporary.  */
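
/* E.g. (illustrative) for

     tem_2 = tem_1 + 8;

   where tem_1 carries a va_list value, tem_2 is just another local alias
   and lands in va_list_escape_vars, whereas an assignment to a non-SSA
   LHS such as a global pointer makes the value untrackable and sets
   si->va_list_escapes.  */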

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */
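
/* An escape is diagnosed, for example (illustrative), when a tracked
   temporary is passed to another function:

     foo (tem_1);

   while a load through it, such as

     x_2 = MEM[(int *) tem_1];

   is fine and merely widens cfun->va_list_gpr_size.  */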

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
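
/* With -fdump-tree-stdarg, the fprintf calls at the end of this function
   emit a summary line of the form (illustrative):

     foo: va_list escapes 0, needs to save 16 GPR units and all FPR units.  */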

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple stmt)
{
  return (is_gimple_call (stmt)
	  && gimple_call_internal_p (stmt)
	  && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
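
/* Before expansion, a statement like 'x = va_arg (ap, int);' is
   represented as an internal function call roughly of the form
   (illustrative dump syntax):

     x = VA_ARG (&ap, ...);

   Expansion replaces it with the target's gimplified va_arg sequence via
   targetm.gimplify_va_arg_expr, splitting the containing basic block as
   needed.  */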

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	tree ap, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build_fold_indirect_ref (ap);

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 3)
	      {
		/* We've transported the size with WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (modified)
    {
      free_dominance_info (CDI_DOMINATORS);
      update_ssa (TODO_update_ssa);
    }
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#ifdef ENABLE_CHECKING
  {
    basic_block bb;
    gimple_stmt_iterator i;
    FOR_EACH_BB_FN (bb, fun)
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
  }
#endif
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, f.i. when passing a va_list to
	 another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}