/* gcc/tree-stdarg.c — from the official GCC repository.
   Listing context: PR lto/45375 ([meta-bug] Issues with building Mozilla
   (i.e. Firefox) with LTO); blob 883c692e8c68433cc4e97f6fe922438a46345ce1.  */
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "hard-reg-set.h"
37 #include "input.h"
38 #include "function.h"
39 #include "langhooks.h"
40 #include "gimple-pretty-print.h"
41 #include "target.h"
42 #include "bitmap.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-expr.h"
50 #include "is-a.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimple-walk.h"
54 #include "gimple-ssa.h"
55 #include "tree-phinodes.h"
56 #include "ssa-iterators.h"
57 #include "stringpool.h"
58 #include "tree-ssanames.h"
59 #include "sbitmap.h"
60 #include "tree-pass.h"
61 #include "tree-stdarg.h"
63 /* A simple pass that attempts to optimize stdarg functions on architectures
64 that need to save register arguments to stack on entry to stdarg functions.
65 If the function doesn't use any va_start macros, no registers need to
66 be saved. If va_start macros are used, the va_list variables don't escape
67 the function, it is only necessary to save registers that will be used
68 in va_arg macros. E.g. if va_arg is only used with integral types
69 in the function, floating point registers don't need to be saved, etc. */
72 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
73 is executed at most as many times as VA_START_BB. */
75 static bool
76 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
78 vec<edge> stack = vNULL;
79 edge e;
80 edge_iterator ei;
81 sbitmap visited;
82 bool ret;
84 if (va_arg_bb == va_start_bb)
85 return true;
87 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
88 return false;
90 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
91 bitmap_clear (visited);
92 ret = true;
94 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
95 stack.safe_push (e);
97 while (! stack.is_empty ())
99 basic_block src;
101 e = stack.pop ();
102 src = e->src;
104 if (e->flags & EDGE_COMPLEX)
106 ret = false;
107 break;
110 if (src == va_start_bb)
111 continue;
113 /* va_arg_bb can be executed more times than va_start_bb. */
114 if (src == va_arg_bb)
116 ret = false;
117 break;
120 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
122 if (! bitmap_bit_p (visited, src->index))
124 bitmap_set_bit (visited, src->index);
125 FOR_EACH_EDGE (e, ei, src->preds)
126 stack.safe_push (e);
130 stack.release ();
131 sbitmap_free (visited);
132 return ret;
136 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
137 return constant, otherwise return HOST_WIDE_INT_M1U.
138 GPR_P is true if this is GPR counter. */
140 static unsigned HOST_WIDE_INT
141 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
142 bool gpr_p)
144 tree lhs, orig_lhs;
145 gimple stmt;
146 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
147 unsigned int max_size;
149 if (si->offsets == NULL)
151 unsigned int i;
153 si->offsets = XNEWVEC (int, num_ssa_names);
154 for (i = 0; i < num_ssa_names; ++i)
155 si->offsets[i] = -1;
158 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
159 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
160 orig_lhs = lhs = rhs;
161 while (lhs)
163 enum tree_code rhs_code;
164 tree rhs1;
166 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
168 if (counter_val >= max_size)
170 ret = max_size;
171 break;
174 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
175 break;
178 stmt = SSA_NAME_DEF_STMT (lhs);
180 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
181 return HOST_WIDE_INT_M1U;
183 rhs_code = gimple_assign_rhs_code (stmt);
184 rhs1 = gimple_assign_rhs1 (stmt);
185 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
186 || gimple_assign_cast_p (stmt))
187 && TREE_CODE (rhs1) == SSA_NAME)
189 lhs = rhs1;
190 continue;
193 if ((rhs_code == POINTER_PLUS_EXPR
194 || rhs_code == PLUS_EXPR)
195 && TREE_CODE (rhs1) == SSA_NAME
196 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
198 ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
199 lhs = rhs1;
200 continue;
203 if (rhs_code == ADDR_EXPR
204 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
205 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
206 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
208 ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
209 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
210 continue;
213 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
214 return HOST_WIDE_INT_M1U;
216 rhs = gimple_assign_rhs1 (stmt);
217 if (TREE_CODE (counter) != TREE_CODE (rhs))
218 return HOST_WIDE_INT_M1U;
220 if (TREE_CODE (counter) == COMPONENT_REF)
222 if (get_base_address (counter) != get_base_address (rhs)
223 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
224 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
225 return HOST_WIDE_INT_M1U;
227 else if (counter != rhs)
228 return HOST_WIDE_INT_M1U;
230 lhs = NULL;
233 lhs = orig_lhs;
234 val = ret + counter_val;
235 while (lhs)
237 enum tree_code rhs_code;
238 tree rhs1;
240 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
241 break;
243 if (val >= max_size)
244 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
245 else
246 si->offsets[SSA_NAME_VERSION (lhs)] = val;
248 stmt = SSA_NAME_DEF_STMT (lhs);
250 rhs_code = gimple_assign_rhs_code (stmt);
251 rhs1 = gimple_assign_rhs1 (stmt);
252 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
253 || gimple_assign_cast_p (stmt))
254 && TREE_CODE (rhs1) == SSA_NAME)
256 lhs = rhs1;
257 continue;
260 if ((rhs_code == POINTER_PLUS_EXPR
261 || rhs_code == PLUS_EXPR)
262 && TREE_CODE (rhs1) == SSA_NAME
263 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
265 val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
266 lhs = rhs1;
267 continue;
270 if (rhs_code == ADDR_EXPR
271 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
272 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
273 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
275 val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
276 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
277 continue;
280 lhs = NULL;
283 return ret;
287 /* Called by walk_tree to look for references to va_list variables. */
289 static tree
290 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
291 void *data)
293 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
294 tree var = *tp;
296 if (TREE_CODE (var) == SSA_NAME)
298 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
299 return var;
301 else if (TREE_CODE (var) == VAR_DECL)
303 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
304 return var;
307 return NULL_TREE;
311 /* Helper function of va_list_counter_struct_op. Compute
312 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
313 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
314 statement. GPR_P is true if AP is a GPR counter, false if it is
315 a FPR counter. */
317 static void
318 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
319 bool write_p)
321 unsigned HOST_WIDE_INT increment;
323 if (si->compute_sizes < 0)
325 si->compute_sizes = 0;
326 if (si->va_start_count == 1
327 && reachable_at_most_once (si->bb, si->va_start_bb))
328 si->compute_sizes = 1;
330 if (dump_file && (dump_flags & TDF_DETAILS))
331 fprintf (dump_file,
332 "bb%d will %sbe executed at most once for each va_start "
333 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
334 si->va_start_bb->index);
337 if (write_p
338 && si->compute_sizes
339 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
341 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
343 cfun->va_list_gpr_size += increment;
344 return;
347 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
349 cfun->va_list_fpr_size += increment;
350 return;
354 if (write_p || !si->compute_sizes)
356 if (gpr_p)
357 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
358 else
359 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
364 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
365 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
366 is false, AP has been seen in VAR = AP assignment.
367 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
368 va_arg operation that doesn't cause the va_list variable to escape
369 current function. */
371 static bool
372 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
373 bool write_p)
375 tree base;
377 if (TREE_CODE (ap) != COMPONENT_REF
378 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
379 return false;
381 if (TREE_CODE (var) != SSA_NAME
382 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
383 return false;
385 base = get_base_address (ap);
386 if (TREE_CODE (base) != VAR_DECL
387 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
388 return false;
390 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
391 va_list_counter_op (si, ap, var, true, write_p);
392 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
393 va_list_counter_op (si, ap, var, false, write_p);
395 return true;
399 /* Check for TEM = AP. Return true if found and the caller shouldn't
400 search for va_list references in the statement. */
402 static bool
403 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
405 if (TREE_CODE (ap) != VAR_DECL
406 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
407 return false;
409 if (TREE_CODE (tem) != SSA_NAME
410 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
411 return false;
413 if (si->compute_sizes < 0)
415 si->compute_sizes = 0;
416 if (si->va_start_count == 1
417 && reachable_at_most_once (si->bb, si->va_start_bb))
418 si->compute_sizes = 1;
420 if (dump_file && (dump_flags & TDF_DETAILS))
421 fprintf (dump_file,
422 "bb%d will %sbe executed at most once for each va_start "
423 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
424 si->va_start_bb->index);
427 /* For void * or char * va_list types, there is just one counter.
428 If va_arg is used in a loop, we don't know how many registers need
429 saving. */
430 if (! si->compute_sizes)
431 return false;
433 if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
434 return false;
436 /* Note the temporary, as we need to track whether it doesn't escape
437 the current function. */
438 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
440 return true;
444 /* Check for:
445 tem1 = AP;
446 TEM2 = tem1 + CST;
447 AP = TEM2;
448 sequence and update cfun->va_list_gpr_size. Return true if found. */
450 static bool
451 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
453 unsigned HOST_WIDE_INT increment;
455 if (TREE_CODE (ap) != VAR_DECL
456 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
457 return false;
459 if (TREE_CODE (tem2) != SSA_NAME
460 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
461 return false;
463 if (si->compute_sizes <= 0)
464 return false;
466 increment = va_list_counter_bump (si, ap, tem2, true);
467 if (increment + 1 <= 1)
468 return false;
470 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
471 cfun->va_list_gpr_size += increment;
472 else
473 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
475 return true;
479 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
480 containing value of some va_list variable plus optionally some constant,
481 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
482 depending whether LHS is a function local temporary. */
484 static void
485 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
487 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
488 return;
490 if (TREE_CODE (rhs) == SSA_NAME)
492 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
493 return;
495 else if (TREE_CODE (rhs) == ADDR_EXPR
496 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
497 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
499 tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
500 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
501 return;
503 else
504 return;
506 if (TREE_CODE (lhs) != SSA_NAME)
508 si->va_list_escapes = true;
509 return;
512 if (si->compute_sizes < 0)
514 si->compute_sizes = 0;
515 if (si->va_start_count == 1
516 && reachable_at_most_once (si->bb, si->va_start_bb))
517 si->compute_sizes = 1;
519 if (dump_file && (dump_flags & TDF_DETAILS))
520 fprintf (dump_file,
521 "bb%d will %sbe executed at most once for each va_start "
522 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
523 si->va_start_bb->index);
526 /* For void * or char * va_list types, there is just one counter.
527 If va_arg is used in a loop, we don't know how many registers need
528 saving. */
529 if (! si->compute_sizes)
531 si->va_list_escapes = true;
532 return;
535 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
536 == HOST_WIDE_INT_M1U)
538 si->va_list_escapes = true;
539 return;
542 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
546 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
547 Return true if va_list might be escaping. */
549 static bool
550 check_all_va_list_escapes (struct stdarg_info *si)
552 basic_block bb;
554 FOR_EACH_BB_FN (bb, cfun)
556 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
557 gsi_next (&i))
559 tree lhs;
560 use_operand_p uop;
561 ssa_op_iter soi;
562 gphi *phi = i.phi ();
564 lhs = PHI_RESULT (phi);
565 if (virtual_operand_p (lhs)
566 || bitmap_bit_p (si->va_list_escape_vars,
567 SSA_NAME_VERSION (lhs)))
568 continue;
570 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
572 tree rhs = USE_FROM_PTR (uop);
573 if (TREE_CODE (rhs) == SSA_NAME
574 && bitmap_bit_p (si->va_list_escape_vars,
575 SSA_NAME_VERSION (rhs)))
577 if (dump_file && (dump_flags & TDF_DETAILS))
579 fputs ("va_list escapes in ", dump_file);
580 print_gimple_stmt (dump_file, phi, 0, dump_flags);
581 fputc ('\n', dump_file);
583 return true;
588 for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
589 gsi_next (&i))
591 gimple stmt = gsi_stmt (i);
592 tree use;
593 ssa_op_iter iter;
595 if (is_gimple_debug (stmt))
596 continue;
598 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
600 if (! bitmap_bit_p (si->va_list_escape_vars,
601 SSA_NAME_VERSION (use)))
602 continue;
604 if (is_gimple_assign (stmt))
606 tree rhs = gimple_assign_rhs1 (stmt);
607 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
609 /* x = *ap_temp; */
610 if (rhs_code == MEM_REF
611 && TREE_OPERAND (rhs, 0) == use
612 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
613 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
614 && si->offsets[SSA_NAME_VERSION (use)] != -1)
616 unsigned HOST_WIDE_INT gpr_size;
617 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
619 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
620 + tree_to_shwi (TREE_OPERAND (rhs, 1))
621 + tree_to_uhwi (access_size);
622 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
623 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
624 else if (gpr_size > cfun->va_list_gpr_size)
625 cfun->va_list_gpr_size = gpr_size;
626 continue;
629 /* va_arg sequences may contain
630 other_ap_temp = ap_temp;
631 other_ap_temp = ap_temp + constant;
632 other_ap_temp = (some_type *) ap_temp;
633 ap = ap_temp;
634 statements. */
635 if (rhs == use
636 && ((rhs_code == POINTER_PLUS_EXPR
637 && (TREE_CODE (gimple_assign_rhs2 (stmt))
638 == INTEGER_CST))
639 || gimple_assign_cast_p (stmt)
640 || (get_gimple_rhs_class (rhs_code)
641 == GIMPLE_SINGLE_RHS)))
643 tree lhs = gimple_assign_lhs (stmt);
645 if (TREE_CODE (lhs) == SSA_NAME
646 && bitmap_bit_p (si->va_list_escape_vars,
647 SSA_NAME_VERSION (lhs)))
648 continue;
650 if (TREE_CODE (lhs) == VAR_DECL
651 && bitmap_bit_p (si->va_list_vars,
652 DECL_UID (lhs) + num_ssa_names))
653 continue;
655 else if (rhs_code == ADDR_EXPR
656 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
657 && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
659 tree lhs = gimple_assign_lhs (stmt);
661 if (bitmap_bit_p (si->va_list_escape_vars,
662 SSA_NAME_VERSION (lhs)))
663 continue;
667 if (dump_file && (dump_flags & TDF_DETAILS))
669 fputs ("va_list escapes in ", dump_file);
670 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
671 fputc ('\n', dump_file);
673 return true;
678 return false;
682 namespace {
684 const pass_data pass_data_stdarg =
686 GIMPLE_PASS, /* type */
687 "stdarg", /* name */
688 OPTGROUP_NONE, /* optinfo_flags */
689 TV_NONE, /* tv_id */
690 ( PROP_cfg | PROP_ssa ), /* properties_required */
691 0, /* properties_provided */
692 0, /* properties_destroyed */
693 0, /* todo_flags_start */
694 0, /* todo_flags_finish */
697 class pass_stdarg : public gimple_opt_pass
699 public:
700 pass_stdarg (gcc::context *ctxt)
701 : gimple_opt_pass (pass_data_stdarg, ctxt)
704 /* opt_pass methods: */
705 virtual bool gate (function *fun)
707 /* This optimization is only for stdarg functions. */
708 return fun->stdarg != 0;
711 virtual unsigned int execute (function *);
713 }; // class pass_stdarg
715 unsigned int
716 pass_stdarg::execute (function *fun)
718 basic_block bb;
719 bool va_list_escapes = false;
720 bool va_list_simple_ptr;
721 struct stdarg_info si;
722 struct walk_stmt_info wi;
723 const char *funcname = NULL;
724 tree cfun_va_list;
726 fun->va_list_gpr_size = 0;
727 fun->va_list_fpr_size = 0;
728 memset (&si, 0, sizeof (si));
729 si.va_list_vars = BITMAP_ALLOC (NULL);
730 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
732 if (dump_file)
733 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
735 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
736 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
737 && (TREE_TYPE (cfun_va_list) == void_type_node
738 || TREE_TYPE (cfun_va_list) == char_type_node);
739 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
741 FOR_EACH_BB_FN (bb, fun)
743 gimple_stmt_iterator i;
745 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
747 gimple stmt = gsi_stmt (i);
748 tree callee, ap;
750 if (!is_gimple_call (stmt))
751 continue;
753 callee = gimple_call_fndecl (stmt);
754 if (!callee
755 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
756 continue;
758 switch (DECL_FUNCTION_CODE (callee))
760 case BUILT_IN_VA_START:
761 break;
762 /* If old style builtins are used, don't optimize anything. */
763 case BUILT_IN_SAVEREGS:
764 case BUILT_IN_NEXT_ARG:
765 va_list_escapes = true;
766 continue;
767 default:
768 continue;
771 si.va_start_count++;
772 ap = gimple_call_arg (stmt, 0);
774 if (TREE_CODE (ap) != ADDR_EXPR)
776 va_list_escapes = true;
777 break;
779 ap = TREE_OPERAND (ap, 0);
780 if (TREE_CODE (ap) == ARRAY_REF)
782 if (! integer_zerop (TREE_OPERAND (ap, 1)))
784 va_list_escapes = true;
785 break;
787 ap = TREE_OPERAND (ap, 0);
789 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
790 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
791 || TREE_CODE (ap) != VAR_DECL)
793 va_list_escapes = true;
794 break;
797 if (is_global_var (ap))
799 va_list_escapes = true;
800 break;
803 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
805 /* VA_START_BB and VA_START_AP will be only used if there is just
806 one va_start in the function. */
807 si.va_start_bb = bb;
808 si.va_start_ap = ap;
811 if (va_list_escapes)
812 break;
815 /* If there were no va_start uses in the function, there is no need to
816 save anything. */
817 if (si.va_start_count == 0)
818 goto finish;
820 /* If some va_list arguments weren't local, we can't optimize. */
821 if (va_list_escapes)
822 goto finish;
824 /* For void * or char * va_list, something useful can be done only
825 if there is just one va_start. */
826 if (va_list_simple_ptr && si.va_start_count > 1)
828 va_list_escapes = true;
829 goto finish;
832 /* For struct * va_list, if the backend didn't tell us what the counter fields
833 are, there is nothing more we can do. */
834 if (!va_list_simple_ptr
835 && va_list_gpr_counter_field == NULL_TREE
836 && va_list_fpr_counter_field == NULL_TREE)
838 va_list_escapes = true;
839 goto finish;
842 /* For void * or char * va_list there is just one counter
843 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
844 if (va_list_simple_ptr)
845 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
847 calculate_dominance_info (CDI_DOMINATORS);
848 memset (&wi, 0, sizeof (wi));
849 wi.info = si.va_list_vars;
851 FOR_EACH_BB_FN (bb, fun)
853 si.compute_sizes = -1;
854 si.bb = bb;
856 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
857 them as assignments for the purpose of escape analysis. This is
858 not needed for non-simple va_list because virtual phis don't perform
859 any real data movement. */
860 if (va_list_simple_ptr)
862 tree lhs, rhs;
863 use_operand_p uop;
864 ssa_op_iter soi;
866 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
867 gsi_next (&i))
869 gphi *phi = i.phi ();
870 lhs = PHI_RESULT (phi);
872 if (virtual_operand_p (lhs))
873 continue;
875 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
877 rhs = USE_FROM_PTR (uop);
878 if (va_list_ptr_read (&si, rhs, lhs))
879 continue;
880 else if (va_list_ptr_write (&si, lhs, rhs))
881 continue;
882 else
883 check_va_list_escapes (&si, lhs, rhs);
885 if (si.va_list_escapes)
887 if (dump_file && (dump_flags & TDF_DETAILS))
889 fputs ("va_list escapes in ", dump_file);
890 print_gimple_stmt (dump_file, phi, 0, dump_flags);
891 fputc ('\n', dump_file);
893 va_list_escapes = true;
899 for (gimple_stmt_iterator i = gsi_start_bb (bb);
900 !gsi_end_p (i) && !va_list_escapes;
901 gsi_next (&i))
903 gimple stmt = gsi_stmt (i);
905 /* Don't look at __builtin_va_{start,end}, they are ok. */
906 if (is_gimple_call (stmt))
908 tree callee = gimple_call_fndecl (stmt);
910 if (callee
911 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
912 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
913 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
914 continue;
917 if (is_gimple_assign (stmt))
919 tree lhs = gimple_assign_lhs (stmt);
920 tree rhs = gimple_assign_rhs1 (stmt);
922 if (va_list_simple_ptr)
924 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
925 == GIMPLE_SINGLE_RHS)
927 /* Check for ap ={v} {}. */
928 if (TREE_CLOBBER_P (rhs))
929 continue;
931 /* Check for tem = ap. */
932 else if (va_list_ptr_read (&si, rhs, lhs))
933 continue;
935 /* Check for the last insn in:
936 tem1 = ap;
937 tem2 = tem1 + CST;
938 ap = tem2;
939 sequence. */
940 else if (va_list_ptr_write (&si, lhs, rhs))
941 continue;
944 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
945 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
946 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
947 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
948 == GIMPLE_SINGLE_RHS))
949 check_va_list_escapes (&si, lhs, rhs);
951 else
953 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
954 == GIMPLE_SINGLE_RHS)
956 /* Check for ap ={v} {}. */
957 if (TREE_CLOBBER_P (rhs))
958 continue;
960 /* Check for ap[0].field = temp. */
961 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
962 continue;
964 /* Check for temp = ap[0].field. */
965 else if (va_list_counter_struct_op (&si, rhs, lhs,
966 false))
967 continue;
970 /* Do any architecture specific checking. */
971 if (targetm.stdarg_optimize_hook
972 && targetm.stdarg_optimize_hook (&si, stmt))
973 continue;
976 else if (is_gimple_debug (stmt))
977 continue;
979 /* All other uses of va_list are either va_copy (that is not handled
980 in this optimization), taking address of va_list variable or
981 passing va_list to other functions (in that case va_list might
982 escape the function and therefore va_start needs to set it up
983 fully), or some unexpected use of va_list. None of these should
984 happen in a gimplified VA_ARG_EXPR. */
985 if (si.va_list_escapes
986 || walk_gimple_op (stmt, find_va_list_reference, &wi))
988 if (dump_file && (dump_flags & TDF_DETAILS))
990 fputs ("va_list escapes in ", dump_file);
991 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
992 fputc ('\n', dump_file);
994 va_list_escapes = true;
998 if (va_list_escapes)
999 break;
1002 if (! va_list_escapes
1003 && va_list_simple_ptr
1004 && ! bitmap_empty_p (si.va_list_escape_vars)
1005 && check_all_va_list_escapes (&si))
1006 va_list_escapes = true;
1008 finish:
1009 if (va_list_escapes)
1011 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1012 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1014 BITMAP_FREE (si.va_list_vars);
1015 BITMAP_FREE (si.va_list_escape_vars);
1016 free (si.offsets);
1017 if (dump_file)
1019 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1020 funcname, (int) va_list_escapes);
1021 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1022 fputs ("all", dump_file);
1023 else
1024 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1025 fputs (" GPR units and ", dump_file);
1026 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1027 fputs ("all", dump_file);
1028 else
1029 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1030 fputs (" FPR units.\n", dump_file);
1032 return 0;
1035 } // anon namespace
1037 gimple_opt_pass *
1038 make_pass_stdarg (gcc::context *ctxt)
1040 return new pass_stdarg (ctxt);