* config/i386/gnu-user.h (TARGET_CAN_SPLIT_STACK): Move from here ...
[official-gcc.git] / gcc / tree-stdarg.c
blob7bf3335c48ad4755b4c74ad956cd0aa4c0c4147f
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "vec.h"
29 #include "machmode.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "langhooks.h"
34 #include "gimple-pretty-print.h"
35 #include "target.h"
36 #include "bitmap.h"
37 #include "predict.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "basic-block.h"
41 #include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "gimple-iterator.h"
47 #include "gimple-walk.h"
48 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "sbitmap.h"
54 #include "tree-pass.h"
55 #include "tree-stdarg.h"
57 /* A simple pass that attempts to optimize stdarg functions on architectures
58 that need to save register arguments to stack on entry to stdarg functions.
59 If the function doesn't use any va_start macros, no registers need to
60 be saved. If va_start macros are used, the va_list variables don't escape
61 the function, it is only necessary to save registers that will be used
62 in va_arg macros. E.g. if va_arg is only used with integral types
63 in the function, floating point registers don't need to be saved, etc. */
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.

   Implementation: walk backwards from VA_ARG_BB's predecessors.  Every
   backward path must reach VA_START_BB without passing through
   VA_ARG_BB again (that would mean a loop executing va_arg more often
   than va_start) and without crossing abnormal/EH edges.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Seed the worklist with all edges into the va_arg block.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal or EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we cannot walk back past va_start_bb to
	 the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.

   The walk follows the SSA def chain of RHS backwards through copies,
   casts, pointer adjustments and &MEM_REF[ptr + off] forms, summing the
   constant offsets, until it reaches a load of COUNTER itself.  Results
   are memoized in SI->offsets (indexed by SSA name version; -1 means
   "not yet computed") by a second pass over the same chain.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the memo table, one slot per SSA name.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the def chain and accumulate the total bump in RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Hit a memoized name: combine with the previously computed
	 offset instead of walking further.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast of another SSA name: follow it.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* name + CST (pointer or integer form): accumulate CST.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* &MEM_REF[name + CST]: same as an addition of CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* End of the chain: it must be a load of COUNTER itself (either
	 the same COMPONENT_REF field on the same base, or the identical
	 tree), otherwise the bump is not recognizable.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and memoize, for each name, the
     counter value (clamped to MAX_SIZE) it corresponds to.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
281 /* Called by walk_tree to look for references to va_list variables. */
283 static tree
284 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
285 void *data)
287 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
288 tree var = *tp;
290 if (TREE_CODE (var) == SSA_NAME)
292 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
293 return var;
295 else if (TREE_CODE (var) == VAR_DECL)
297 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
298 return var;
301 return NULL_TREE;
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* compute_sizes < 0 means "not decided yet for this bb": decide it
     once and cache the answer for subsequent statements in the bb.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* `(increment) + 1 > 1' filters out both 0 and the failure sentinel
     HOST_WIDE_INT_M1U (which wraps to 0 after the +1) in one test.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Fall back to "save everything" when we could not account for the
     bump precisely.  Plain reads with compute_sizes set are harmless
     and change nothing.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* AP must be a field access (the counter field of the va_list).  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be a temporary SSA name not itself tracked as va_list.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  /* The base must be one of the va_list variables we track.  */
  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Dispatch on which ABI counter field is accessed; other fields of
     the va_list struct are ignored (but still non-escaping).  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  Only used for
   simple (void */ /* or char *) va_list types.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide (once per bb) whether sizes may be computed here.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  /* compute_sizes must have been decided (and be positive) by an
     earlier va_list_ptr_read in this bb.  */
  if (si->compute_sizes <= 0)
    return false;

  /* `increment + 1 <= 1' rejects both 0 and the HOST_WIDE_INT_M1U
     failure sentinel (which wraps to 0).  */
  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHS can carry a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must derive from a tracked escape temporary, either directly
     or via &MEM_REF[tmp ...]; anything else is irrelevant here.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a tracked value into anything but an SSA temporary means
     it may be visible elsewhere: treat as full escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per bb) whether sizes may be computed here.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another local temporary carrying the va_list value; keep
     tracking it instead of giving up.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.

   Any use of a tracked temporary that is not one of the benign forms
   recognized below (dereference for the actual va_arg load, copy/cast/
   offset into another tracked name, or a store back into the va_list
   variable itself) is treated as an escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHIs merging a tracked temporary into an untracked name mean
	 we lost track of the value: escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  — the actual va_arg load.  Grow the
		     GPR size to cover the bytes this access reaches.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary: fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Store back into the va_list variable: fine.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* &MEM_REF[tmp ...] into a tracked name: fine.  */
		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any use not handled above is a potential escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
676 namespace {
/* Static descriptor for the stdarg pass; consumed by the pass manager
   via the gimple_opt_pass constructor below.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The stdarg optimization pass.  Runs only on varargs functions and
   computes how many GPR/FPR argument-save bytes the prologue really
   needs (see the file comment above).  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* This optimization is only for stdarg functions.  */
      return fun->stdarg != 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
709 unsigned int
710 pass_stdarg::execute (function *fun)
712 basic_block bb;
713 bool va_list_escapes = false;
714 bool va_list_simple_ptr;
715 struct stdarg_info si;
716 struct walk_stmt_info wi;
717 const char *funcname = NULL;
718 tree cfun_va_list;
720 fun->va_list_gpr_size = 0;
721 fun->va_list_fpr_size = 0;
722 memset (&si, 0, sizeof (si));
723 si.va_list_vars = BITMAP_ALLOC (NULL);
724 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
726 if (dump_file)
727 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
729 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
730 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
731 && (TREE_TYPE (cfun_va_list) == void_type_node
732 || TREE_TYPE (cfun_va_list) == char_type_node);
733 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
735 FOR_EACH_BB_FN (bb, fun)
737 gimple_stmt_iterator i;
739 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
741 gimple stmt = gsi_stmt (i);
742 tree callee, ap;
744 if (!is_gimple_call (stmt))
745 continue;
747 callee = gimple_call_fndecl (stmt);
748 if (!callee
749 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
750 continue;
752 switch (DECL_FUNCTION_CODE (callee))
754 case BUILT_IN_VA_START:
755 break;
756 /* If old style builtins are used, don't optimize anything. */
757 case BUILT_IN_SAVEREGS:
758 case BUILT_IN_NEXT_ARG:
759 va_list_escapes = true;
760 continue;
761 default:
762 continue;
765 si.va_start_count++;
766 ap = gimple_call_arg (stmt, 0);
768 if (TREE_CODE (ap) != ADDR_EXPR)
770 va_list_escapes = true;
771 break;
773 ap = TREE_OPERAND (ap, 0);
774 if (TREE_CODE (ap) == ARRAY_REF)
776 if (! integer_zerop (TREE_OPERAND (ap, 1)))
778 va_list_escapes = true;
779 break;
781 ap = TREE_OPERAND (ap, 0);
783 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
784 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
785 || TREE_CODE (ap) != VAR_DECL)
787 va_list_escapes = true;
788 break;
791 if (is_global_var (ap))
793 va_list_escapes = true;
794 break;
797 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
799 /* VA_START_BB and VA_START_AP will be only used if there is just
800 one va_start in the function. */
801 si.va_start_bb = bb;
802 si.va_start_ap = ap;
805 if (va_list_escapes)
806 break;
809 /* If there were no va_start uses in the function, there is no need to
810 save anything. */
811 if (si.va_start_count == 0)
812 goto finish;
814 /* If some va_list arguments weren't local, we can't optimize. */
815 if (va_list_escapes)
816 goto finish;
818 /* For void * or char * va_list, something useful can be done only
819 if there is just one va_start. */
820 if (va_list_simple_ptr && si.va_start_count > 1)
822 va_list_escapes = true;
823 goto finish;
826 /* For struct * va_list, if the backend didn't tell us what the counter fields
827 are, there is nothing more we can do. */
828 if (!va_list_simple_ptr
829 && va_list_gpr_counter_field == NULL_TREE
830 && va_list_fpr_counter_field == NULL_TREE)
832 va_list_escapes = true;
833 goto finish;
836 /* For void * or char * va_list there is just one counter
837 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
838 if (va_list_simple_ptr)
839 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
841 calculate_dominance_info (CDI_DOMINATORS);
842 memset (&wi, 0, sizeof (wi));
843 wi.info = si.va_list_vars;
845 FOR_EACH_BB_FN (bb, fun)
847 si.compute_sizes = -1;
848 si.bb = bb;
850 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
851 them as assignments for the purpose of escape analysis. This is
852 not needed for non-simple va_list because virtual phis don't perform
853 any real data movement. */
854 if (va_list_simple_ptr)
856 tree lhs, rhs;
857 use_operand_p uop;
858 ssa_op_iter soi;
860 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
861 gsi_next (&i))
863 gphi *phi = i.phi ();
864 lhs = PHI_RESULT (phi);
866 if (virtual_operand_p (lhs))
867 continue;
869 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
871 rhs = USE_FROM_PTR (uop);
872 if (va_list_ptr_read (&si, rhs, lhs))
873 continue;
874 else if (va_list_ptr_write (&si, lhs, rhs))
875 continue;
876 else
877 check_va_list_escapes (&si, lhs, rhs);
879 if (si.va_list_escapes)
881 if (dump_file && (dump_flags & TDF_DETAILS))
883 fputs ("va_list escapes in ", dump_file);
884 print_gimple_stmt (dump_file, phi, 0, dump_flags);
885 fputc ('\n', dump_file);
887 va_list_escapes = true;
893 for (gimple_stmt_iterator i = gsi_start_bb (bb);
894 !gsi_end_p (i) && !va_list_escapes;
895 gsi_next (&i))
897 gimple stmt = gsi_stmt (i);
899 /* Don't look at __builtin_va_{start,end}, they are ok. */
900 if (is_gimple_call (stmt))
902 tree callee = gimple_call_fndecl (stmt);
904 if (callee
905 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
906 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
907 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
908 continue;
911 if (is_gimple_assign (stmt))
913 tree lhs = gimple_assign_lhs (stmt);
914 tree rhs = gimple_assign_rhs1 (stmt);
916 if (va_list_simple_ptr)
918 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
919 == GIMPLE_SINGLE_RHS)
921 /* Check for ap ={v} {}. */
922 if (TREE_CLOBBER_P (rhs))
923 continue;
925 /* Check for tem = ap. */
926 else if (va_list_ptr_read (&si, rhs, lhs))
927 continue;
929 /* Check for the last insn in:
930 tem1 = ap;
931 tem2 = tem1 + CST;
932 ap = tem2;
933 sequence. */
934 else if (va_list_ptr_write (&si, lhs, rhs))
935 continue;
938 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
939 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
940 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
941 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
942 == GIMPLE_SINGLE_RHS))
943 check_va_list_escapes (&si, lhs, rhs);
945 else
947 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
948 == GIMPLE_SINGLE_RHS)
950 /* Check for ap ={v} {}. */
951 if (TREE_CLOBBER_P (rhs))
952 continue;
954 /* Check for ap[0].field = temp. */
955 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
956 continue;
958 /* Check for temp = ap[0].field. */
959 else if (va_list_counter_struct_op (&si, rhs, lhs,
960 false))
961 continue;
964 /* Do any architecture specific checking. */
965 if (targetm.stdarg_optimize_hook
966 && targetm.stdarg_optimize_hook (&si, stmt))
967 continue;
970 else if (is_gimple_debug (stmt))
971 continue;
973 /* All other uses of va_list are either va_copy (that is not handled
974 in this optimization), taking address of va_list variable or
975 passing va_list to other functions (in that case va_list might
976 escape the function and therefore va_start needs to set it up
977 fully), or some unexpected use of va_list. None of these should
978 happen in a gimplified VA_ARG_EXPR. */
979 if (si.va_list_escapes
980 || walk_gimple_op (stmt, find_va_list_reference, &wi))
982 if (dump_file && (dump_flags & TDF_DETAILS))
984 fputs ("va_list escapes in ", dump_file);
985 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
986 fputc ('\n', dump_file);
988 va_list_escapes = true;
992 if (va_list_escapes)
993 break;
996 if (! va_list_escapes
997 && va_list_simple_ptr
998 && ! bitmap_empty_p (si.va_list_escape_vars)
999 && check_all_va_list_escapes (&si))
1000 va_list_escapes = true;
1002 finish:
1003 if (va_list_escapes)
1005 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1006 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1008 BITMAP_FREE (si.va_list_vars);
1009 BITMAP_FREE (si.va_list_escape_vars);
1010 free (si.offsets);
1011 if (dump_file)
1013 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1014 funcname, (int) va_list_escapes);
1015 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1016 fputs ("all", dump_file);
1017 else
1018 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1019 fputs (" GPR units and ", dump_file);
1020 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1021 fputs ("all", dump_file);
1022 else
1023 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1024 fputs (" FPR units.\n", dump_file);
1026 return 0;
1029 } // anon namespace
/* Factory function used by the pass manager (see passes.def) to create
   the stdarg pass instance.  Caller owns the returned object.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}