/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
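/* As a concrete illustration (an editor-added sketch, not part of the
   original sources): in a function like

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   va_arg is only used with an integral type, so this pass can compute a
   zero cfun->va_list_fpr_size and the target's va_start expansion may
   skip saving floating point argument registers entirely.  */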
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Walk predecessor edges backwards from VA_ARG_BB; any path that can
     reach VA_ARG_BB again, or enters via a complex edge, means VA_ARG_BB
     may execute more often than VA_START_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
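/* Illustrative sketch (editor-added, names invented): for a simple
   pointer va_list, a va_arg sequence leaves an SSA chain such as

     tem1_1 = ap;
     tem2_2 = tem1_1 + 8;
     ap = tem2_2;

   and for the final statement this function walks from RHS (tem2_2 here)
   back to COUNTER (ap), accumulating the constant increments, and would
   return 8.  */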
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain from RHS back to COUNTER,
     accumulating the constant increments in RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  /* Second pass: walk the chain again, recording the known counter offset
     for each SSA name visited so later queries are answered directly.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (VAR_P (var))
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */
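/* For instance (an editor-added illustration; on x86_64 the SysV va_list
   counter fields are named gp_offset and fp_offset), a recognized pair of
   statements looks roughly like

     temp_1 = ap[0].gp_offset;
     ap[0].gp_offset = temp_2;

   with temp_1/temp_2 function-local SSA temporaries.  */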
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */
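/* Illustrative sketch (editor-added, names invented): after

     tem_1 = ap;
     tem_2 = tem_1 + 16;

   both tem_1 and tem_2 end up tracked in si->va_list_escape_vars; a later

     some_global = tem_2;

   has a non-SSA LHS, so si->va_list_escapes is set.  */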
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
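/* With -fdump-tree-stdarg, the summary printed at the end of this function
   looks, for example (editor-added, illustrative values only), like:

     foo: va_list escapes 0, needs to save 16 GPR units and 0 FPR units.  */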
static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || !VAR_P (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}
/* Expand IFN_VA_ARGs in FUN.  */
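/* Roughly (an editor-added sketch; the dump syntax is illustrative only),
   a gimplified va_arg left behind as

     x = VA_ARG (&ap, 0B, 0B);

   is replaced by the sequence that targetm.gimplify_va_arg_expr produces
   for the target, spliced into one or more new basic blocks right after
   the IFN_VA_ARG call.  */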
static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 4)
	      {
		/* We've transported the size with a WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (modified)
    {
      free_dominance_info (CDI_DOMINATORS);
      update_ssa (TODO_update_ssa);
    }
}
/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
	for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	  gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
    }
}
namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, e.g. when passing a va_list to
	 another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}
namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}