2008-07-07 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / tree-stdarg.c
blobf9228872dc2c6379c240f61dad51c68b7f7cdb7f
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "function.h"
27 #include "langhooks.h"
28 #include "diagnostic.h"
29 #include "target.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-stdarg.h"
34 /* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
43 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
44 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this extraction of the file is missing brace-only lines
   (the embedded original line numbering below skips them), so block
   structure must be read from the gaps in that numbering.  */
46 static bool
47 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
49 VEC (edge, heap) *stack = NULL;
50 edge e;
51 edge_iterator ei;
52 sbitmap visited;
53 bool ret;
/* Same block: trivially executed equally often.  */
55 if (va_arg_bb == va_start_bb)
56 return true;
/* Without dominance, some path reaches VA_ARG_BB while bypassing
   VA_START_BB entirely.  */
58 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
59 return false;
61 visited = sbitmap_alloc (last_basic_block);
62 sbitmap_zero (visited);
63 ret = true;
/* Worklist walk backwards over predecessor edges, terminating each
   path as soon as it reaches VA_START_BB.  */
65 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
66 VEC_safe_push (edge, heap, stack, e);
68 while (! VEC_empty (edge, stack))
70 basic_block src;
72 e = VEC_pop (edge, stack);
73 src = e->src;
/* Be conservative about abnormal/EH edges.  */
75 if (e->flags & EDGE_COMPLEX)
77 ret = false;
78 break;
81 if (src == va_start_bb)
82 continue;
84 /* va_arg_bb can be executed more times than va_start_bb. */
85 if (src == va_arg_bb)
87 ret = false;
88 break;
/* VA_START_BB dominates VA_ARG_BB and every path stops there, so the
   walk cannot escape to the entry block.  */
91 gcc_assert (src != ENTRY_BLOCK_PTR);
93 if (! TEST_BIT (visited, src->index))
95 SET_BIT (visited, src->index);
96 FOR_EACH_EDGE (e, ei, src->preds)
97 VEC_safe_push (edge, heap, stack, e);
101 VEC_free (edge, heap, stack);
102 sbitmap_free (visited);
103 return ret;
107 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
108 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
109 GPR_P is true if this is GPR counter. */
111 static unsigned HOST_WIDE_INT
112 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
113 bool gpr_p)
115 tree stmt, lhs, orig_lhs;
116 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
117 unsigned int max_size;
/* Lazily allocate the per-SSA-name offset cache; -1 means "offset not
   yet computed" for that SSA name.  */
119 if (si->offsets == NULL)
121 unsigned int i;
123 si->offsets = XNEWVEC (int, num_ssa_names);
124 for (i = 0; i < num_ssa_names; ++i)
125 si->offsets[i] = -1;
128 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
129 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
130 orig_lhs = lhs = rhs;
/* First pass: walk the SSA def chain from RHS back towards COUNTER,
   accumulating the constant increments in RET.  */
131 while (lhs)
/* A cached offset lets us stop early and reuse an earlier result.  */
133 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
135 if (counter_val >= max_size)
137 ret = max_size;
138 break;
141 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
142 break;
145 stmt = SSA_NAME_DEF_STMT (lhs);
/* Give up on anything but a plain assignment that defines LHS.  */
147 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
148 || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
149 return (unsigned HOST_WIDE_INT) -1;
151 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
152 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
153 rhs = TREE_OPERAND (rhs, 0);
/* Plain SSA copy: follow the chain.  */
155 if (TREE_CODE (rhs) == SSA_NAME)
157 lhs = rhs;
158 continue;
/* Conversion of an SSA name: look through it.  */
161 if (CONVERT_EXPR_P (rhs)
162 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
164 lhs = TREE_OPERAND (rhs, 0);
165 continue;
/* Addition of a host-representable unsigned constant: count it.  */
168 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
169 || TREE_CODE (rhs) == PLUS_EXPR)
170 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
171 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
172 && host_integerp (TREE_OPERAND (rhs, 1), 1))
174 ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
175 lhs = TREE_OPERAND (rhs, 0);
176 continue;
/* The chain must terminate at COUNTER itself (same base and field for
   COMPONENT_REF counters), otherwise the bump is unknown.  */
179 if (TREE_CODE (counter) != TREE_CODE (rhs))
180 return (unsigned HOST_WIDE_INT) -1;
182 if (TREE_CODE (counter) == COMPONENT_REF)
184 if (get_base_address (counter) != get_base_address (rhs)
185 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
186 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
187 return (unsigned HOST_WIDE_INT) -1;
189 else if (counter != rhs)
190 return (unsigned HOST_WIDE_INT) -1;
192 lhs = NULL;
/* Second pass: re-walk the same chain and record in the cache the
   absolute counter offset of each SSA name, capped at MAX_SIZE.  */
195 lhs = orig_lhs;
196 val = ret + counter_val;
197 while (lhs)
199 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
200 break;
202 if (val >= max_size)
203 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
204 else
205 si->offsets[SSA_NAME_VERSION (lhs)] = val;
207 stmt = SSA_NAME_DEF_STMT (lhs);
209 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
210 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
211 rhs = TREE_OPERAND (rhs, 0);
213 if (TREE_CODE (rhs) == SSA_NAME)
215 lhs = rhs;
216 continue;
219 if (CONVERT_EXPR_P (rhs)
220 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
222 lhs = TREE_OPERAND (rhs, 0);
223 continue;
226 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
227 || TREE_CODE (rhs) == PLUS_EXPR)
228 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
229 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
230 && host_integerp (TREE_OPERAND (rhs, 1), 1))
232 val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
233 lhs = TREE_OPERAND (rhs, 0);
234 continue;
237 lhs = NULL;
240 return ret;
244 /* Called by walk_tree to look for references to va_list variables. */
246 static tree
247 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
248 void *data)
250 bitmap va_list_vars = (bitmap) data;
251 tree var = *tp;
253 if (TREE_CODE (var) == SSA_NAME)
254 var = SSA_NAME_VAR (var);
256 if (TREE_CODE (var) == VAR_DECL
257 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
258 return var;
260 return NULL_TREE;
264 /* Helper function of va_list_counter_struct_op. Compute
265 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
266 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
267 statement. GPR_P is true if AP is a GPR counter, false if it is
268 a FPR counter. */
270 static void
271 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
272 bool write_p)
274 unsigned HOST_WIDE_INT increment;
/* Lazily decide, once per basic block, whether exact sizes can be
   computed: only with a single va_start and when this block runs at
   most as often as the va_start block.  */
276 if (si->compute_sizes < 0)
278 si->compute_sizes = 0;
279 if (si->va_start_count == 1
280 && reachable_at_most_once (si->bb, si->va_start_bb))
281 si->compute_sizes = 1;
283 if (dump_file && (dump_flags & TDF_DETAILS))
284 fprintf (dump_file,
285 "bb%d will %sbe executed at most once for each va_start "
286 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
287 si->va_start_bb->index);
/* Counter write with a known constant bump: the "+ 1 > 1" test on the
   unsigned result filters out both a bump of 0 and the
   (unsigned HOST_WIDE_INT) -1 failure value.  Accumulate, saturating
   at the maximum below.  */
290 if (write_p
291 && si->compute_sizes
292 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
294 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
296 cfun->va_list_gpr_size += increment;
297 return;
300 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
302 cfun->va_list_fpr_size += increment;
303 return;
/* Otherwise be conservative: an unanalyzable write, or any counter
   operation when sizes cannot be computed, forces saving all
   registers of this kind.  */
307 if (write_p || !si->compute_sizes)
309 if (gpr_p)
310 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
311 else
312 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
317 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
318 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
319 is false, AP has been seen in VAR = AP assignment.
320 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
321 va_arg operation that doesn't cause the va_list variable to escape
322 current function. */
324 static bool
325 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
326 bool write_p)
328 tree base;
330 if (TREE_CODE (ap) != COMPONENT_REF
331 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
332 return false;
334 if (TREE_CODE (var) != SSA_NAME
335 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
336 return false;
338 base = get_base_address (ap);
339 if (TREE_CODE (base) != VAR_DECL
340 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
341 return false;
343 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
344 va_list_counter_op (si, ap, var, true, write_p);
345 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
346 va_list_counter_op (si, ap, var, false, write_p);
348 return true;
352 /* Check for TEM = AP. Return true if found and the caller shouldn't
353 search for va_list references in the statement. */
355 static bool
356 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
/* AP must be one of the tracked local va_list variables.  */
358 if (TREE_CODE (ap) != VAR_DECL
359 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
360 return false;
/* TEM must be a function-local SSA temporary distinct from the
   tracked va_list variables.  */
362 if (TREE_CODE (tem) != SSA_NAME
363 || bitmap_bit_p (si->va_list_vars,
364 DECL_UID (SSA_NAME_VAR (tem)))
365 || is_global_var (SSA_NAME_VAR (tem)))
366 return false;
/* Lazily decide, once per basic block, whether exact sizes can be
   computed (single va_start, block runs at most as often as it).  */
368 if (si->compute_sizes < 0)
370 si->compute_sizes = 0;
371 if (si->va_start_count == 1
372 && reachable_at_most_once (si->bb, si->va_start_bb))
373 si->compute_sizes = 1;
375 if (dump_file && (dump_flags & TDF_DETAILS))
376 fprintf (dump_file,
377 "bb%d will %sbe executed at most once for each va_start "
378 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
379 si->va_start_bb->index);
382 /* For void * or char * va_list types, there is just one counter.
383 If va_arg is used in a loop, we don't know how many registers need
384 saving. */
385 if (! si->compute_sizes)
386 return false;
/* The pointer value must be a known constant offset from va_start.  */
388 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
389 return false;
391 /* Note the temporary, as we need to track whether it doesn't escape
392 the current function. */
393 bitmap_set_bit (si->va_list_escape_vars,
394 DECL_UID (SSA_NAME_VAR (tem)));
395 return true;
399 /* Check for:
400 tem1 = AP;
401 TEM2 = tem1 + CST;
402 AP = TEM2;
403 sequence and update cfun->va_list_gpr_size. Return true if found. */
405 static bool
406 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
408 unsigned HOST_WIDE_INT increment;
410 if (TREE_CODE (ap) != VAR_DECL
411 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
412 return false;
414 if (TREE_CODE (tem2) != SSA_NAME
415 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
416 return false;
418 if (si->compute_sizes <= 0)
419 return false;
421 increment = va_list_counter_bump (si, ap, tem2, true);
422 if (increment + 1 <= 1)
423 return false;
425 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
426 cfun->va_list_gpr_size += increment;
427 else
428 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
430 return true;
434 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
435 containing value of some va_list variable plus optionally some constant,
436 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
437 depending whether LHS is a function local temporary. */
439 static void
440 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
442 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
443 return;
/* Strip one level of "+ CST" or conversion to get at the copied
   pointer value.  */
445 if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
446 || TREE_CODE (rhs) == PLUS_EXPR)
447 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
448 || CONVERT_EXPR_P (rhs))
449 rhs = TREE_OPERAND (rhs, 0);
/* Only values derived from tracked va_list temporaries matter.  */
451 if (TREE_CODE (rhs) != SSA_NAME
452 || ! bitmap_bit_p (si->va_list_escape_vars,
453 DECL_UID (SSA_NAME_VAR (rhs))))
454 return;
/* Copying into anything but a function-local SSA temporary is an
   escape.  */
456 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
458 si->va_list_escapes = true;
459 return;
/* Lazily decide whether exact sizes can be computed for this block.  */
462 if (si->compute_sizes < 0)
464 si->compute_sizes = 0;
465 if (si->va_start_count == 1
466 && reachable_at_most_once (si->bb, si->va_start_bb))
467 si->compute_sizes = 1;
469 if (dump_file && (dump_flags & TDF_DETAILS))
470 fprintf (dump_file,
471 "bb%d will %sbe executed at most once for each va_start "
472 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
473 si->va_start_bb->index);
476 /* For void * or char * va_list types, there is just one counter.
477 If va_arg is used in a loop, we don't know how many registers need
478 saving. */
479 if (! si->compute_sizes)
481 si->va_list_escapes = true;
482 return;
/* An unknown offset from va_start also counts as an escape.  */
485 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
486 == (unsigned HOST_WIDE_INT) -1)
488 si->va_list_escapes = true;
489 return;
/* LHS now holds a known va_list-derived value; keep tracking it.  */
492 bitmap_set_bit (si->va_list_escape_vars,
493 DECL_UID (SSA_NAME_VAR (lhs)));
497 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
498 Return true if va_list might be escaping. */
500 static bool
501 check_all_va_list_escapes (struct stdarg_info *si)
503 basic_block bb;
/* Scan every statement in the function for uses of the tracked
   escape temporaries.  */
505 FOR_EACH_BB (bb)
507 block_stmt_iterator i;
509 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
511 tree stmt = bsi_stmt (i), use;
512 ssa_op_iter iter;
514 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
516 if (! bitmap_bit_p (si->va_list_escape_vars,
517 DECL_UID (SSA_NAME_VAR (use))))
518 continue;
520 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
522 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
523 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
525 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
526 rhs = TREE_OPERAND (rhs, 0);
528 /* x = *ap_temp; */
/* A dereference at a cached known offset tells us how many GPR units
   this va_arg read actually touches; grow the saved size to cover it,
   saturating at the maximum.  */
529 if (TREE_CODE (rhs) == INDIRECT_REF
530 && TREE_OPERAND (rhs, 0) == use
531 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
532 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
533 && si->offsets[SSA_NAME_VERSION (use)] != -1)
535 unsigned HOST_WIDE_INT gpr_size;
536 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
538 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
539 + tree_low_cst (access_size, 1);
540 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
541 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
542 else if (gpr_size > cfun->va_list_gpr_size)
543 cfun->va_list_gpr_size = gpr_size;
544 continue;
547 /* va_arg sequences may contain
548 other_ap_temp = ap_temp;
549 other_ap_temp = ap_temp + constant;
550 other_ap_temp = (some_type *) ap_temp;
551 ap = ap_temp;
552 statements. */
553 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
554 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
555 || CONVERT_EXPR_P (rhs))
556 rhs = TREE_OPERAND (rhs, 0);
/* Copies into another tracked temporary, or back into a tracked
   va_list variable, are fine.  */
558 if (rhs == use)
560 if (TREE_CODE (lhs) == SSA_NAME
561 && bitmap_bit_p (si->va_list_escape_vars,
562 DECL_UID (SSA_NAME_VAR (lhs))))
563 continue;
565 if (TREE_CODE (lhs) == VAR_DECL
566 && bitmap_bit_p (si->va_list_vars,
567 DECL_UID (lhs)))
568 continue;
/* Any other use of an escape temporary is a potential escape.  */
572 if (dump_file && (dump_flags & TDF_DETAILS))
574 fputs ("va_list escapes in ", dump_file);
575 print_generic_expr (dump_file, stmt, dump_flags);
576 fputc ('\n', dump_file);
578 return true;
583 return false;
587 /* Return true if this optimization pass should be done.
588 It makes only sense for stdarg functions. */
590 static bool
591 gate_optimize_stdarg (void)
593 /* This optimization is only for stdarg functions. */
594 return cfun->stdarg != 0;
598 /* Entry point to the stdarg optimization pass. */
600 static unsigned int
601 execute_optimize_stdarg (void)
603 basic_block bb;
604 bool va_list_escapes = false;
605 bool va_list_simple_ptr;
606 struct stdarg_info si;
607 const char *funcname = NULL;
608 tree cfun_va_list;
/* Start from "nothing needs saving" and grow the sizes as register
   uses are discovered; any escape falls back to saving everything.  */
610 cfun->va_list_gpr_size = 0;
611 cfun->va_list_fpr_size = 0;
612 memset (&si, 0, sizeof (si));
613 si.va_list_vars = BITMAP_ALLOC (NULL);
614 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
616 if (dump_file)
617 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
/* A "simple" va_list is a plain void * / char * pointer rather than
   an aggregate; those are handled through the pointer-tracking path.  */
619 cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
620 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
621 && (TREE_TYPE (cfun_va_list) == void_type_node
622 || TREE_TYPE (cfun_va_list) == char_type_node);
623 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
/* Pass 1: find every va_start call and record the local va_list
   variable it initializes.  */
625 FOR_EACH_BB (bb)
627 block_stmt_iterator i;
629 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
631 tree stmt = bsi_stmt (i);
632 tree call = get_call_expr_in (stmt), callee;
633 tree ap;
635 if (!call)
636 continue;
638 callee = get_callee_fndecl (call);
639 if (!callee
640 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
641 continue;
643 switch (DECL_FUNCTION_CODE (callee))
645 case BUILT_IN_VA_START:
646 break;
647 /* If old style builtins are used, don't optimize anything. */
648 case BUILT_IN_SAVEREGS:
649 case BUILT_IN_ARGS_INFO:
650 case BUILT_IN_NEXT_ARG:
651 va_list_escapes = true;
652 continue;
653 default:
654 continue;
657 si.va_start_count++;
658 ap = CALL_EXPR_ARG (call, 0);
/* The argument of va_start must be the address of a local va_list
   variable of the ABI va_list type (possibly &ap[0] for array
   va_lists); anything else defeats the analysis.  */
660 if (TREE_CODE (ap) != ADDR_EXPR)
662 va_list_escapes = true;
663 break;
665 ap = TREE_OPERAND (ap, 0);
666 if (TREE_CODE (ap) == ARRAY_REF)
668 if (! integer_zerop (TREE_OPERAND (ap, 1)))
670 va_list_escapes = true;
671 break;
673 ap = TREE_OPERAND (ap, 0);
675 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
676 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
677 || TREE_CODE (ap) != VAR_DECL)
679 va_list_escapes = true;
680 break;
683 if (is_global_var (ap))
685 va_list_escapes = true;
686 break;
689 bitmap_set_bit (si.va_list_vars, DECL_UID (ap));
691 /* VA_START_BB and VA_START_AP will be only used if there is just
692 one va_start in the function. */
693 si.va_start_bb = bb;
694 si.va_start_ap = ap;
697 if (va_list_escapes)
698 break;
701 /* If there were no va_start uses in the function, there is no need to
702 save anything. */
703 if (si.va_start_count == 0)
704 goto finish;
706 /* If some va_list arguments weren't local, we can't optimize. */
707 if (va_list_escapes)
708 goto finish;
710 /* For void * or char * va_list, something useful can be done only
711 if there is just one va_start. */
712 if (va_list_simple_ptr && si.va_start_count > 1)
714 va_list_escapes = true;
715 goto finish;
718 /* For struct * va_list, if the backend didn't tell us what the counter fields
719 are, there is nothing more we can do. */
720 if (!va_list_simple_ptr
721 && va_list_gpr_counter_field == NULL_TREE
722 && va_list_fpr_counter_field == NULL_TREE)
724 va_list_escapes = true;
725 goto finish;
728 /* For void * or char * va_list there is just one counter
729 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
730 if (va_list_simple_ptr)
731 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
/* reachable_at_most_once relies on dominance information.  */
733 calculate_dominance_info (CDI_DOMINATORS);
/* Pass 2: analyze every statement for va_list reads, writes, counter
   updates, and escapes.  */
735 FOR_EACH_BB (bb)
737 block_stmt_iterator i;
739 si.compute_sizes = -1;
740 si.bb = bb;
742 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
743 them as assignments for the purpose of escape analysis. This is
744 not needed for non-simple va_list because virtual phis don't perform
745 any real data movement. */
746 if (va_list_simple_ptr)
748 tree phi, lhs, rhs;
749 use_operand_p uop;
750 ssa_op_iter soi;
752 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
754 lhs = PHI_RESULT (phi);
756 if (!is_gimple_reg (lhs))
757 continue;
759 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
761 rhs = USE_FROM_PTR (uop);
762 if (va_list_ptr_read (&si, rhs, lhs))
763 continue;
764 else if (va_list_ptr_write (&si, lhs, rhs))
765 continue;
766 else
767 check_va_list_escapes (&si, lhs, rhs);
769 if (si.va_list_escapes
770 || walk_tree (&phi, find_va_list_reference,
771 si.va_list_vars, NULL))
773 if (dump_file && (dump_flags & TDF_DETAILS))
775 fputs ("va_list escapes in ", dump_file);
776 print_generic_expr (dump_file, phi, dump_flags);
777 fputc ('\n', dump_file);
779 va_list_escapes = true;
785 for (i = bsi_start (bb);
786 !bsi_end_p (i) && !va_list_escapes;
787 bsi_next (&i))
789 tree stmt = bsi_stmt (i);
790 tree call;
792 /* Don't look at __builtin_va_{start,end}, they are ok. */
793 call = get_call_expr_in (stmt);
794 if (call)
796 tree callee = get_callee_fndecl (call);
798 if (callee
799 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
800 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
801 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
802 continue;
805 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
807 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
808 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
810 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
811 rhs = TREE_OPERAND (rhs, 0);
813 if (va_list_simple_ptr)
815 /* Check for tem = ap. */
816 if (va_list_ptr_read (&si, rhs, lhs))
817 continue;
819 /* Check for the last insn in:
820 tem1 = ap;
821 tem2 = tem1 + CST;
822 ap = tem2;
823 sequence. */
824 else if (va_list_ptr_write (&si, lhs, rhs))
825 continue;
827 else
828 check_va_list_escapes (&si, lhs, rhs);
830 else
832 /* Check for ap[0].field = temp. */
833 if (va_list_counter_struct_op (&si, lhs, rhs, true))
834 continue;
836 /* Check for temp = ap[0].field. */
837 else if (va_list_counter_struct_op (&si, rhs, lhs, false))
838 continue;
840 /* Do any architecture specific checking. */
841 else if (targetm.stdarg_optimize_hook
842 && targetm.stdarg_optimize_hook (&si, lhs, rhs))
843 continue;
847 /* All other uses of va_list are either va_copy (that is not handled
848 in this optimization), taking address of va_list variable or
849 passing va_list to other functions (in that case va_list might
850 escape the function and therefore va_start needs to set it up
851 fully), or some unexpected use of va_list. None of these should
852 happen in a gimplified VA_ARG_EXPR. */
853 if (si.va_list_escapes
854 || walk_tree (&stmt, find_va_list_reference,
855 si.va_list_vars, NULL))
857 if (dump_file && (dump_flags & TDF_DETAILS))
859 fputs ("va_list escapes in ", dump_file);
860 print_generic_expr (dump_file, stmt, dump_flags);
861 fputc ('\n', dump_file);
863 va_list_escapes = true;
867 if (va_list_escapes)
868 break;
/* Finally check that none of the tracked pointer temporaries leak.  */
871 if (! va_list_escapes
872 && va_list_simple_ptr
873 && ! bitmap_empty_p (si.va_list_escape_vars)
874 && check_all_va_list_escapes (&si))
875 va_list_escapes = true;
877 finish:
/* An escaping va_list forces va_start to save all registers.  */
878 if (va_list_escapes)
880 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
881 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
883 BITMAP_FREE (si.va_list_vars);
884 BITMAP_FREE (si.va_list_escape_vars);
885 free (si.offsets);
886 if (dump_file)
888 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
889 funcname, (int) va_list_escapes);
890 if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
891 fputs ("all", dump_file);
892 else
893 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
894 fputs (" GPR units and ", dump_file);
895 if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
896 fputs ("all", dump_file);
897 else
898 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
899 fputs (" FPR units.\n", dump_file);
901 return 0;
905 struct gimple_opt_pass pass_stdarg =
908 GIMPLE_PASS,
909 "stdarg", /* name */
910 gate_optimize_stdarg, /* gate */
911 execute_optimize_stdarg, /* execute */
912 NULL, /* sub */
913 NULL, /* next */
914 0, /* static_pass_number */
915 0, /* tv_id */
916 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
917 0, /* properties_provided */
918 0, /* properties_destroyed */
919 0, /* todo_flags_start */
920 TODO_dump_func /* todo_flags_finish */