/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral
   types in the function, floating point registers don't need to be
   saved, etc.  */

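/* Illustrative sketch (hypothetical example using <stdarg.h>, not part of
   the original sources): for a variadic function such as

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, sum = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         sum += va_arg (ap, int);
       va_end (ap);
       return sum;
     }

   va_arg is only ever used with an integral type, so on targets that pass
   arguments in both general purpose and floating point registers the
   prologue code emitted for va_start does not need to spill the floating
   point argument registers at all.  */
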
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            VEC_safe_push (edge, heap, stack, e);
        }
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is the GPR counter.  */

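/* Hypothetical sketch of what this computes: with COUNTER being the GPR
   counter field ap.gp_offset and a va_arg lowered to something like

     D.1 = ap.gp_offset;
     D.2 = D.1 + 8;
     ap.gp_offset = D.2;

   a call with RHS == D.2 walks the SSA definition chain
   D.2 -> D.1 -> ap.gp_offset and returns 8, the number of GPR units
   consumed by this va_arg.  The field name gp_offset is only for
   illustration; the actual field is whatever the backend exposes via
   va_list_gpr_counter_field.  */
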
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
           || TREE_CODE (rhs) == PLUS_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
           || TREE_CODE (rhs) == PLUS_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) data;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
                       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function-local
   temporary.  */

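/* Hypothetical sketch: with tem1 already recorded in
   si->va_list_escape_vars by va_list_ptr_read,

     tem2 = tem1 + 16;   LHS is a function-local SSA temporary, so it is
                         merely added to si->va_list_escape_vars
     globptr = tem2;     LHS is not a local temporary, so
                         si->va_list_escapes is set

   (subject to the additional compute_sizes and counter bump checks
   performed below).  */
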
static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
        || TREE_CODE (rhs) == PLUS_EXPR)
       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
      || TREE_CODE (rhs) == NOP_EXPR
      || TREE_CODE (rhs) == CONVERT_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
                         DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (lhs)));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i), use;
          ssa_op_iter iter;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;

              if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
                {
                  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
                  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

                  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  /* x = *ap_temp;  */
                  if (TREE_CODE (rhs) == INDIRECT_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
                      || TREE_CODE (rhs) == NOP_EXPR
                      || TREE_CODE (rhs) == CONVERT_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  if (rhs == use)
                    {
                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return current_function_stdarg != 0;
}

/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

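  /* A va_list is either a plain pointer (void * or char *) or a target
     specific aggregate; e.g. on x86_64 it is (roughly, for illustration)
     an array of one struct containing gp_offset/fp_offset counters plus
     the save area pointers.  The simple pointer case is tracked through
     the pointer value itself, the aggregate case through the counter
     fields the backend exposes via va_list_{g,f}pr_counter_field.  */
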
  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
                       && (TREE_TYPE (va_list_type_node) == void_type_node
                           || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call = get_call_expr_in (stmt), callee;
          tree ap;

          if (!call)
            continue;

          callee = get_callee_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_ARGS_INFO:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = CALL_EXPR_ARG (call, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }

          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }

              ap = TREE_OPERAND (ap, 0);
            }

          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (va_list_type_node)
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree phi, lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
            {
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes
                      || walk_tree (&phi, find_va_list_reference,
                                    si.va_list_vars, NULL))
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_generic_expr (dump_file, phi, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = bsi_start (bb);
           !bsi_end_p (i) && !va_list_escapes;
           bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          call = get_call_expr_in (stmt);
          if (call)
            {
              tree callee = get_callee_fndecl (call);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                rhs = TREE_OPERAND (rhs, 0);

              if (va_list_simple_ptr)
                {
                  /* Check for tem = ap.  */
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;

                  /* Check for the last insn in:
                     tem1 = ap;
                     tem2 = tem1 + CST;
                     ap = tem2;
                     sequence.  */
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;

                  else
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  /* Check for ap[0].field = temp.  */
                  if (va_list_counter_struct_op (&si, lhs, rhs, true))
                    continue;

                  /* Check for temp = ap[0].field.  */
                  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
                    continue;

                  /* Do any architecture specific checking.  */
                  else if (targetm.stdarg_optimize_hook
                           && targetm.stdarg_optimize_hook (&si, lhs, rhs))
                    continue;
                }
            }

          /* All other uses of va_list are either va_copy (which is not
             handled in this optimization), taking the address of a va_list
             variable, or passing a va_list to other functions (in which case
             the va_list might escape the function and therefore va_start
             needs to set it up fully), or some unexpected use of va_list.
             None of these should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_tree (&stmt, find_va_list_reference,
                            si.va_list_vars, NULL))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}

struct tree_opt_pass pass_stdarg =
{
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};