/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used, but the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
44 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
45 is executed at most as many times as VA_START_BB. */
47 static bool
48 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
50 VEC (edge, heap) *stack = NULL;
51 edge e;
52 edge_iterator ei;
53 sbitmap visited;
54 bool ret;
56 if (va_arg_bb == va_start_bb)
57 return true;
59 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
60 return false;
62 visited = sbitmap_alloc (last_basic_block);
63 sbitmap_zero (visited);
64 ret = true;
66 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
67 VEC_safe_push (edge, heap, stack, e);
69 while (! VEC_empty (edge, stack))
71 basic_block src;
73 e = VEC_pop (edge, stack);
74 src = e->src;
76 if (e->flags & EDGE_COMPLEX)
78 ret = false;
79 break;
82 if (src == va_start_bb)
83 continue;
85 /* va_arg_bb can be executed more times than va_start_bb. */
86 if (src == va_arg_bb)
88 ret = false;
89 break;
92 gcc_assert (src != ENTRY_BLOCK_PTR);
94 if (! TEST_BIT (visited, src->index))
96 SET_BIT (visited, src->index);
97 FOR_EACH_EDGE (e, ei, src->preds)
98 VEC_safe_push (edge, heap, stack, e);
102 VEC_free (edge, heap, stack);
103 sbitmap_free (visited);
104 return ret;
108 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
109 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
110 GPR_P is true if this is GPR counter. */
112 static unsigned HOST_WIDE_INT
113 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
114 bool gpr_p)
116 tree lhs, orig_lhs;
117 gimple stmt;
118 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
119 unsigned int max_size;
121 if (si->offsets == NULL)
123 unsigned int i;
125 si->offsets = XNEWVEC (int, num_ssa_names);
126 for (i = 0; i < num_ssa_names; ++i)
127 si->offsets[i] = -1;
130 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
131 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
132 orig_lhs = lhs = rhs;
133 while (lhs)
135 enum tree_code rhs_code;
136 tree rhs1;
138 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
140 if (counter_val >= max_size)
142 ret = max_size;
143 break;
146 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
147 break;
150 stmt = SSA_NAME_DEF_STMT (lhs);
152 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
153 return (unsigned HOST_WIDE_INT) -1;
155 rhs_code = gimple_assign_rhs_code (stmt);
156 rhs1 = gimple_assign_rhs1 (stmt);
157 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
158 || gimple_assign_cast_p (stmt))
159 && TREE_CODE (rhs1) == SSA_NAME)
161 lhs = rhs1;
162 continue;
165 if ((rhs_code == POINTER_PLUS_EXPR
166 || rhs_code == PLUS_EXPR)
167 && TREE_CODE (rhs1) == SSA_NAME
168 && host_integerp (gimple_assign_rhs2 (stmt), 1))
170 ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
171 lhs = rhs1;
172 continue;
175 if (rhs_code == ADDR_EXPR
176 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
177 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
178 && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
180 ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
181 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
182 continue;
185 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
186 return (unsigned HOST_WIDE_INT) -1;
188 rhs = gimple_assign_rhs1 (stmt);
189 if (TREE_CODE (counter) != TREE_CODE (rhs))
190 return (unsigned HOST_WIDE_INT) -1;
192 if (TREE_CODE (counter) == COMPONENT_REF)
194 if (get_base_address (counter) != get_base_address (rhs)
195 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
196 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
197 return (unsigned HOST_WIDE_INT) -1;
199 else if (counter != rhs)
200 return (unsigned HOST_WIDE_INT) -1;
202 lhs = NULL;
205 lhs = orig_lhs;
206 val = ret + counter_val;
207 while (lhs)
209 enum tree_code rhs_code;
210 tree rhs1;
212 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
213 break;
215 if (val >= max_size)
216 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
217 else
218 si->offsets[SSA_NAME_VERSION (lhs)] = val;
220 stmt = SSA_NAME_DEF_STMT (lhs);
222 rhs_code = gimple_assign_rhs_code (stmt);
223 rhs1 = gimple_assign_rhs1 (stmt);
224 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
225 || gimple_assign_cast_p (stmt))
226 && TREE_CODE (rhs1) == SSA_NAME)
228 lhs = rhs1;
229 continue;
232 if ((rhs_code == POINTER_PLUS_EXPR
233 || rhs_code == PLUS_EXPR)
234 && TREE_CODE (rhs1) == SSA_NAME
235 && host_integerp (gimple_assign_rhs2 (stmt), 1))
237 val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
238 lhs = rhs1;
239 continue;
242 if (rhs_code == ADDR_EXPR
243 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
244 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
245 && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
247 val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
248 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
249 continue;
252 lhs = NULL;
255 return ret;
259 /* Called by walk_tree to look for references to va_list variables. */
261 static tree
262 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
263 void *data)
265 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
266 tree var = *tp;
268 if (TREE_CODE (var) == SSA_NAME)
269 var = SSA_NAME_VAR (var);
271 if (TREE_CODE (var) == VAR_DECL
272 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
273 return var;
275 return NULL_TREE;
279 /* Helper function of va_list_counter_struct_op. Compute
280 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
281 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
282 statement. GPR_P is true if AP is a GPR counter, false if it is
283 a FPR counter. */
285 static void
286 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
287 bool write_p)
289 unsigned HOST_WIDE_INT increment;
291 if (si->compute_sizes < 0)
293 si->compute_sizes = 0;
294 if (si->va_start_count == 1
295 && reachable_at_most_once (si->bb, si->va_start_bb))
296 si->compute_sizes = 1;
298 if (dump_file && (dump_flags & TDF_DETAILS))
299 fprintf (dump_file,
300 "bb%d will %sbe executed at most once for each va_start "
301 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
302 si->va_start_bb->index);
305 if (write_p
306 && si->compute_sizes
307 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
309 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
311 cfun->va_list_gpr_size += increment;
312 return;
315 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
317 cfun->va_list_fpr_size += increment;
318 return;
322 if (write_p || !si->compute_sizes)
324 if (gpr_p)
325 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
326 else
327 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
332 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
333 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
334 is false, AP has been seen in VAR = AP assignment.
335 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
336 va_arg operation that doesn't cause the va_list variable to escape
337 current function. */
339 static bool
340 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
341 bool write_p)
343 tree base;
345 if (TREE_CODE (ap) != COMPONENT_REF
346 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
347 return false;
349 if (TREE_CODE (var) != SSA_NAME
350 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
351 return false;
353 base = get_base_address (ap);
354 if (TREE_CODE (base) != VAR_DECL
355 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
356 return false;
358 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
359 va_list_counter_op (si, ap, var, true, write_p);
360 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
361 va_list_counter_op (si, ap, var, false, write_p);
363 return true;
367 /* Check for TEM = AP. Return true if found and the caller shouldn't
368 search for va_list references in the statement. */
370 static bool
371 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
373 if (TREE_CODE (ap) != VAR_DECL
374 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
375 return false;
377 if (TREE_CODE (tem) != SSA_NAME
378 || bitmap_bit_p (si->va_list_vars,
379 DECL_UID (SSA_NAME_VAR (tem)))
380 || is_global_var (SSA_NAME_VAR (tem)))
381 return false;
383 if (si->compute_sizes < 0)
385 si->compute_sizes = 0;
386 if (si->va_start_count == 1
387 && reachable_at_most_once (si->bb, si->va_start_bb))
388 si->compute_sizes = 1;
390 if (dump_file && (dump_flags & TDF_DETAILS))
391 fprintf (dump_file,
392 "bb%d will %sbe executed at most once for each va_start "
393 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
394 si->va_start_bb->index);
397 /* For void * or char * va_list types, there is just one counter.
398 If va_arg is used in a loop, we don't know how many registers need
399 saving. */
400 if (! si->compute_sizes)
401 return false;
403 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
404 return false;
406 /* Note the temporary, as we need to track whether it doesn't escape
407 the current function. */
408 bitmap_set_bit (si->va_list_escape_vars,
409 DECL_UID (SSA_NAME_VAR (tem)));
410 return true;
414 /* Check for:
415 tem1 = AP;
416 TEM2 = tem1 + CST;
417 AP = TEM2;
418 sequence and update cfun->va_list_gpr_size. Return true if found. */
420 static bool
421 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
423 unsigned HOST_WIDE_INT increment;
425 if (TREE_CODE (ap) != VAR_DECL
426 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
427 return false;
429 if (TREE_CODE (tem2) != SSA_NAME
430 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
431 return false;
433 if (si->compute_sizes <= 0)
434 return false;
436 increment = va_list_counter_bump (si, ap, tem2, true);
437 if (increment + 1 <= 1)
438 return false;
440 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
441 cfun->va_list_gpr_size += increment;
442 else
443 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
445 return true;
449 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
450 containing value of some va_list variable plus optionally some constant,
451 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
452 depending whether LHS is a function local temporary. */
454 static void
455 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
457 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
458 return;
460 if (TREE_CODE (rhs) == SSA_NAME)
462 if (! bitmap_bit_p (si->va_list_escape_vars,
463 DECL_UID (SSA_NAME_VAR (rhs))))
464 return;
466 else if (TREE_CODE (rhs) == ADDR_EXPR
467 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
468 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
470 if (! bitmap_bit_p (si->va_list_escape_vars,
471 DECL_UID (SSA_NAME_VAR (TREE_OPERAND
472 (TREE_OPERAND (rhs, 0), 0)))))
473 return;
475 else
476 return;
478 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
480 si->va_list_escapes = true;
481 return;
484 if (si->compute_sizes < 0)
486 si->compute_sizes = 0;
487 if (si->va_start_count == 1
488 && reachable_at_most_once (si->bb, si->va_start_bb))
489 si->compute_sizes = 1;
491 if (dump_file && (dump_flags & TDF_DETAILS))
492 fprintf (dump_file,
493 "bb%d will %sbe executed at most once for each va_start "
494 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
495 si->va_start_bb->index);
498 /* For void * or char * va_list types, there is just one counter.
499 If va_arg is used in a loop, we don't know how many registers need
500 saving. */
501 if (! si->compute_sizes)
503 si->va_list_escapes = true;
504 return;
507 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
508 == (unsigned HOST_WIDE_INT) -1)
510 si->va_list_escapes = true;
511 return;
514 bitmap_set_bit (si->va_list_escape_vars,
515 DECL_UID (SSA_NAME_VAR (lhs)));
519 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
520 Return true if va_list might be escaping. */
522 static bool
523 check_all_va_list_escapes (struct stdarg_info *si)
525 basic_block bb;
527 FOR_EACH_BB (bb)
529 gimple_stmt_iterator i;
531 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
533 gimple stmt = gsi_stmt (i);
534 tree use;
535 ssa_op_iter iter;
537 if (is_gimple_debug (stmt))
538 continue;
540 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
542 if (! bitmap_bit_p (si->va_list_escape_vars,
543 DECL_UID (SSA_NAME_VAR (use))))
544 continue;
546 if (is_gimple_assign (stmt))
548 tree rhs = gimple_assign_rhs1 (stmt);
549 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
551 /* x = *ap_temp; */
552 if (rhs_code == MEM_REF
553 && TREE_OPERAND (rhs, 0) == use
554 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
555 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
556 && si->offsets[SSA_NAME_VERSION (use)] != -1)
558 unsigned HOST_WIDE_INT gpr_size;
559 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
561 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
562 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
563 + tree_low_cst (access_size, 1);
564 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
565 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
566 else if (gpr_size > cfun->va_list_gpr_size)
567 cfun->va_list_gpr_size = gpr_size;
568 continue;
571 /* va_arg sequences may contain
572 other_ap_temp = ap_temp;
573 other_ap_temp = ap_temp + constant;
574 other_ap_temp = (some_type *) ap_temp;
575 ap = ap_temp;
576 statements. */
577 if (rhs == use
578 && ((rhs_code == POINTER_PLUS_EXPR
579 && (TREE_CODE (gimple_assign_rhs2 (stmt))
580 == INTEGER_CST))
581 || gimple_assign_cast_p (stmt)
582 || (get_gimple_rhs_class (rhs_code)
583 == GIMPLE_SINGLE_RHS)))
585 tree lhs = gimple_assign_lhs (stmt);
587 if (TREE_CODE (lhs) == SSA_NAME
588 && bitmap_bit_p (si->va_list_escape_vars,
589 DECL_UID (SSA_NAME_VAR (lhs))))
590 continue;
592 if (TREE_CODE (lhs) == VAR_DECL
593 && bitmap_bit_p (si->va_list_vars,
594 DECL_UID (lhs)))
595 continue;
597 else if (rhs_code == ADDR_EXPR
598 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
599 && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
601 tree lhs = gimple_assign_lhs (stmt);
603 if (bitmap_bit_p (si->va_list_escape_vars,
604 DECL_UID (SSA_NAME_VAR (lhs))))
605 continue;
609 if (dump_file && (dump_flags & TDF_DETAILS))
611 fputs ("va_list escapes in ", dump_file);
612 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
613 fputc ('\n', dump_file);
615 return true;
620 return false;
/* Return true if this optimization pass should be done.
   It makes only sense for stdarg functions.  Always gate the pass in;
   execute_optimize_stdarg itself returns immediately for non-stdarg
   functions.  */

static bool
gate_optimize_stdarg (void)
{
  return true;
}
634 /* Entry point to the stdarg optimization pass. */
636 static unsigned int
637 execute_optimize_stdarg (void)
639 basic_block bb;
640 bool va_list_escapes = false;
641 bool va_list_simple_ptr;
642 struct stdarg_info si;
643 struct walk_stmt_info wi;
644 const char *funcname = NULL;
645 tree cfun_va_list;
647 /* This optimization is only for stdarg functions. */
648 if (cfun->stdarg == 0)
649 return 0;
651 cfun->va_list_gpr_size = 0;
652 cfun->va_list_fpr_size = 0;
653 memset (&si, 0, sizeof (si));
654 si.va_list_vars = BITMAP_ALLOC (NULL);
655 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
657 if (dump_file)
658 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
660 cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
661 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
662 && (TREE_TYPE (cfun_va_list) == void_type_node
663 || TREE_TYPE (cfun_va_list) == char_type_node);
664 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
666 FOR_EACH_BB (bb)
668 gimple_stmt_iterator i;
670 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
672 gimple stmt = gsi_stmt (i);
673 tree callee, ap;
675 if (!is_gimple_call (stmt))
676 continue;
678 callee = gimple_call_fndecl (stmt);
679 if (!callee
680 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
681 continue;
683 switch (DECL_FUNCTION_CODE (callee))
685 case BUILT_IN_VA_START:
686 break;
687 /* If old style builtins are used, don't optimize anything. */
688 case BUILT_IN_SAVEREGS:
689 case BUILT_IN_NEXT_ARG:
690 va_list_escapes = true;
691 continue;
692 default:
693 continue;
696 si.va_start_count++;
697 ap = gimple_call_arg (stmt, 0);
699 if (TREE_CODE (ap) != ADDR_EXPR)
701 va_list_escapes = true;
702 break;
704 ap = TREE_OPERAND (ap, 0);
705 if (TREE_CODE (ap) == ARRAY_REF)
707 if (! integer_zerop (TREE_OPERAND (ap, 1)))
709 va_list_escapes = true;
710 break;
712 ap = TREE_OPERAND (ap, 0);
714 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
715 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
716 || TREE_CODE (ap) != VAR_DECL)
718 va_list_escapes = true;
719 break;
722 if (is_global_var (ap))
724 va_list_escapes = true;
725 break;
728 bitmap_set_bit (si.va_list_vars, DECL_UID (ap));
730 /* VA_START_BB and VA_START_AP will be only used if there is just
731 one va_start in the function. */
732 si.va_start_bb = bb;
733 si.va_start_ap = ap;
736 if (va_list_escapes)
737 break;
740 /* If there were no va_start uses in the function, there is no need to
741 save anything. */
742 if (si.va_start_count == 0)
743 goto finish;
745 /* If some va_list arguments weren't local, we can't optimize. */
746 if (va_list_escapes)
747 goto finish;
749 /* For void * or char * va_list, something useful can be done only
750 if there is just one va_start. */
751 if (va_list_simple_ptr && si.va_start_count > 1)
753 va_list_escapes = true;
754 goto finish;
757 /* For struct * va_list, if the backend didn't tell us what the counter fields
758 are, there is nothing more we can do. */
759 if (!va_list_simple_ptr
760 && va_list_gpr_counter_field == NULL_TREE
761 && va_list_fpr_counter_field == NULL_TREE)
763 va_list_escapes = true;
764 goto finish;
767 /* For void * or char * va_list there is just one counter
768 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
769 if (va_list_simple_ptr)
770 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
772 calculate_dominance_info (CDI_DOMINATORS);
773 memset (&wi, 0, sizeof (wi));
774 wi.info = si.va_list_vars;
776 FOR_EACH_BB (bb)
778 gimple_stmt_iterator i;
780 si.compute_sizes = -1;
781 si.bb = bb;
783 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
784 them as assignments for the purpose of escape analysis. This is
785 not needed for non-simple va_list because virtual phis don't perform
786 any real data movement. */
787 if (va_list_simple_ptr)
789 tree lhs, rhs;
790 use_operand_p uop;
791 ssa_op_iter soi;
793 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
795 gimple phi = gsi_stmt (i);
796 lhs = PHI_RESULT (phi);
798 if (!is_gimple_reg (lhs))
799 continue;
801 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
803 rhs = USE_FROM_PTR (uop);
804 if (va_list_ptr_read (&si, rhs, lhs))
805 continue;
806 else if (va_list_ptr_write (&si, lhs, rhs))
807 continue;
808 else
809 check_va_list_escapes (&si, lhs, rhs);
811 if (si.va_list_escapes)
813 if (dump_file && (dump_flags & TDF_DETAILS))
815 fputs ("va_list escapes in ", dump_file);
816 print_gimple_stmt (dump_file, phi, 0, dump_flags);
817 fputc ('\n', dump_file);
819 va_list_escapes = true;
825 for (i = gsi_start_bb (bb);
826 !gsi_end_p (i) && !va_list_escapes;
827 gsi_next (&i))
829 gimple stmt = gsi_stmt (i);
831 /* Don't look at __builtin_va_{start,end}, they are ok. */
832 if (is_gimple_call (stmt))
834 tree callee = gimple_call_fndecl (stmt);
836 if (callee
837 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
838 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
839 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
840 continue;
843 if (is_gimple_assign (stmt))
845 tree lhs = gimple_assign_lhs (stmt);
846 tree rhs = gimple_assign_rhs1 (stmt);
848 if (va_list_simple_ptr)
850 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
851 == GIMPLE_SINGLE_RHS)
853 /* Check for tem = ap. */
854 if (va_list_ptr_read (&si, rhs, lhs))
855 continue;
857 /* Check for the last insn in:
858 tem1 = ap;
859 tem2 = tem1 + CST;
860 ap = tem2;
861 sequence. */
862 else if (va_list_ptr_write (&si, lhs, rhs))
863 continue;
866 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
867 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
868 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
869 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
870 == GIMPLE_SINGLE_RHS))
871 check_va_list_escapes (&si, lhs, rhs);
873 else
875 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
876 == GIMPLE_SINGLE_RHS)
878 /* Check for ap[0].field = temp. */
879 if (va_list_counter_struct_op (&si, lhs, rhs, true))
880 continue;
882 /* Check for temp = ap[0].field. */
883 else if (va_list_counter_struct_op (&si, rhs, lhs,
884 false))
885 continue;
888 /* Do any architecture specific checking. */
889 if (targetm.stdarg_optimize_hook
890 && targetm.stdarg_optimize_hook (&si, stmt))
891 continue;
894 else if (is_gimple_debug (stmt))
895 continue;
897 /* All other uses of va_list are either va_copy (that is not handled
898 in this optimization), taking address of va_list variable or
899 passing va_list to other functions (in that case va_list might
900 escape the function and therefore va_start needs to set it up
901 fully), or some unexpected use of va_list. None of these should
902 happen in a gimplified VA_ARG_EXPR. */
903 if (si.va_list_escapes
904 || walk_gimple_op (stmt, find_va_list_reference, &wi))
906 if (dump_file && (dump_flags & TDF_DETAILS))
908 fputs ("va_list escapes in ", dump_file);
909 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
910 fputc ('\n', dump_file);
912 va_list_escapes = true;
916 if (va_list_escapes)
917 break;
920 if (! va_list_escapes
921 && va_list_simple_ptr
922 && ! bitmap_empty_p (si.va_list_escape_vars)
923 && check_all_va_list_escapes (&si))
924 va_list_escapes = true;
926 finish:
927 if (va_list_escapes)
929 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
930 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
932 BITMAP_FREE (si.va_list_vars);
933 BITMAP_FREE (si.va_list_escape_vars);
934 free (si.offsets);
935 if (dump_file)
937 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
938 funcname, (int) va_list_escapes);
939 if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
940 fputs ("all", dump_file);
941 else
942 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
943 fputs (" GPR units and ", dump_file);
944 if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
945 fputs ("all", dump_file);
946 else
947 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
948 fputs (" FPR units.\n", dump_file);
950 return 0;
954 struct gimple_opt_pass pass_stdarg =
957 GIMPLE_PASS,
958 "stdarg", /* name */
959 gate_optimize_stdarg, /* gate */
960 execute_optimize_stdarg, /* execute */
961 NULL, /* sub */
962 NULL, /* next */
963 0, /* static_pass_number */
964 TV_NONE, /* tv_id */
965 PROP_cfg | PROP_ssa, /* properties_required */
966 0, /* properties_provided */
967 0, /* properties_destroyed */
968 0, /* todo_flags_start */
969 TODO_dump_func /* todo_flags_finish */