gcc/tree-stdarg.c
/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save the registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
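
/* As an illustrative sketch (not taken from any particular target; save
   area layout and slot sizes are target dependent), consider:

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   Here the va_list never escapes and va_arg is only used with an integral
   type, so the pass can leave cfun->va_list_fpr_size at 0 and the prologue
   need not spill any floating point argument registers.  */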

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            VEC_safe_push (edge, heap, stack, e);
        }
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant, return that
   constant, otherwise return (unsigned HOST_WIDE_INT) -1.  GPR_P is true
   if this is a GPR counter, false if it is an FPR counter.  */
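/* For example (a sketch only; the gp_offset field name and the 8-byte
   increment below are illustrative, not mandated by any target):

     D.1 = ap.gp_offset;
     D.2 = D.1 + 8;
     ap.gp_offset = D.2;

   With COUNTER == ap.gp_offset and RHS == D.2, walking the SSA definitions
   of D.2 back to the read of the counter yields the constant bump 8.  */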

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree stmt, lhs, orig_lhs;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
           || TREE_CODE (rhs) == PLUS_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          lhs = rhs;
          continue;
        }

      if ((TREE_CODE (rhs) == NOP_EXPR
           || TREE_CODE (rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
        {
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
           || TREE_CODE (rhs) == PLUS_EXPR)
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
          && host_integerp (TREE_OPERAND (rhs, 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
          lhs = TREE_OPERAND (rhs, 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) data;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter; if WRITE_P
   is true, it has been seen in an AP = VAR statement, otherwise in a
   VAR = AP statement.  GPR_P is true if AP is a GPR counter, false if it
   is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if
   WRITE_P is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */
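/* As an illustrative sketch of a gimplified va_arg on a target with a
   struct va_list (the field and SSA names below are made up, and the
   8-byte increment is only an example):

     D.3 = ap.gp_offset;        <- VAR = AP, i.e. WRITE_P is false
     D.4 = D.3 + 8;
     ap.gp_offset = D.4;        <- AP = VAR, i.e. WRITE_P is true

   Only the counter field accesses are recognized here; other references
   to the va_list in the expanded sequence are left to
   targetm.stdarg_optimize_hook or to the generic escape check in
   execute_optimize_stdarg.  */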

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
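/* For a void * or char * va_list the gimplified va_arg looks roughly like
   this sketch (SSA names and the 4-byte step are illustrative only):

     tem_1 = ap;                <- handled here
     tem_2 = tem_1 + 4;
     ap = tem_2;                <- handled by va_list_ptr_write

   The temporary is recorded in si->va_list_escape_vars so that its later
   uses can be checked by check_va_list_escapes and
   check_all_va_list_escapes.  */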

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
                       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it escapes the
     current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function-local
   temporary.  */
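/* An illustrative sketch (variable names are made up):

     tem_1 = ap;
     p_2 = tem_1 + 4;           <- LHS p_2 joins si->va_list_escape_vars
     some_global = p_2;         <- LHS not a local SSA name: escape

   Copies into local SSA temporaries merely propagate the "derived from a
   va_list" property; anything else is treated as an escape.  */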

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
        || TREE_CODE (rhs) == PLUS_EXPR)
       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
      || TREE_CODE (rhs) == NOP_EXPR
      || TREE_CODE (rhs) == CONVERT_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
                         DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (lhs)));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i), use;
          ssa_op_iter iter;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;

              if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
                {
                  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
                  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

                  if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  /* x = *ap_temp;  */
                  if (TREE_CODE (rhs) == INDIRECT_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
                      || TREE_CODE (rhs) == NOP_EXPR
                      || TREE_CODE (rhs) == CONVERT_EXPR)
                    rhs = TREE_OPERAND (rhs, 0);

                  if (rhs == use)
                    {
                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return current_function_stdarg != 0;
}

/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  const char *funcname = NULL;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
                       && (TREE_TYPE (va_list_type_node) == void_type_node
                           || TREE_TYPE (va_list_type_node) == char_type_node);
  gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call = get_call_expr_in (stmt), callee;
          tree ap;

          if (!call)
            continue;

          callee = get_callee_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_STDARG_START:
            case BUILT_IN_ARGS_INFO:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = CALL_EXPR_ARG (call, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (va_list_type_node)
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree phi, lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
            {
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes
                      || walk_tree (&phi, find_va_list_reference,
                                    si.va_list_vars, NULL))
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_generic_expr (dump_file, phi, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = bsi_start (bb);
           !bsi_end_p (i) && !va_list_escapes;
           bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          call = get_call_expr_in (stmt);
          if (call)
            {
              tree callee = get_callee_fndecl (call);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
                rhs = TREE_OPERAND (rhs, 0);

              if (va_list_simple_ptr)
                {
                  /* Check for tem = ap.  */
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;

                  /* Check for the last insn in:
                       tem1 = ap;
                       tem2 = tem1 + CST;
                       ap = tem2;
                     sequence.  */
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;

                  else
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  /* Check for ap[0].field = temp.  */
                  if (va_list_counter_struct_op (&si, lhs, rhs, true))
                    continue;

                  /* Check for temp = ap[0].field.  */
                  else if (va_list_counter_struct_op (&si, rhs, lhs, false))
                    continue;

                  /* Do any architecture specific checking.  */
                  else if (targetm.stdarg_optimize_hook
                           && targetm.stdarg_optimize_hook (&si, lhs, rhs))
                    continue;
                }
            }

          /* All other uses of va_list are either va_copy (which is not
             handled in this optimization), taking the address of a va_list
             variable, or passing a va_list to other functions (in which case
             the va_list might escape the function and therefore va_start
             needs to set it up fully), or some unexpected use of va_list.
             None of these should happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_tree (&stmt, find_va_list_reference,
                            si.va_list_vars, NULL))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_generic_expr (dump_file, stmt, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}

struct tree_opt_pass pass_stdarg =
{
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};