gcc/tree-stdarg.c

/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
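
/* For example (an illustrative sketch; which registers are involved and how
   many bytes each va_arg consumes depend on the target ABI), in

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, s = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         s += va_arg (ap, int);
       va_end (ap);
       return s;
     }

   va_arg is only used with an integral type, so on a target with separate
   GPR and FPR save areas the pass can leave cfun->va_list_fpr_size at 0 and
   the prologue does not have to spill any floating point argument
   registers.  */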

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */
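
/* For instance, if VA_START_BB contains the va_start call and VA_ARG_BB is
   the body of a loop executed after it, VA_ARG_BB can run many times per
   execution of VA_START_BB; the backward walk below then reaches VA_ARG_BB
   again (src == va_arg_bb) and the answer is false.  Any EDGE_COMPLEX edge
   encountered on the walk is treated just as conservatively.  */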

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
        {
          SET_BIT (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            VEC_safe_push (edge, heap, stack, e);
        }
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return the constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is the GPR counter.  */
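
/* As an illustrative GIMPLE sketch (field and temporary names here are made
   up; the real ones depend on the target's va_list layout), a recognized
   counter bump typically looks like

     gp_offset.1 = ap.gp_offset;
     gp_offset.2 = gp_offset.1 + 8;
     ap.gp_offset = gp_offset.2;

   and calling this function on the final store, with COUNTER being
   ap.gp_offset and RHS being gp_offset.2, returns 8.  */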

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (TREE_CODE (var) == VAR_DECL
      && bitmap_bit_p (va_list_vars, DECL_UID (var)))
    return var;

  return NULL_TREE;
}

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter; if WRITE_P
   is true, it has been seen in an AP = VAR statement, otherwise in a
   VAR = AP statement.  GPR_P is true if AP is a GPR counter, false if it
   is an FPR counter.  */
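
/* In other words, when the store back into the counter is a recognized
   constant bump, that constant is added to cfun->va_list_gpr_size or
   cfun->va_list_fpr_size (saturating at VA_LIST_MAX_GPR_SIZE resp.
   VA_LIST_MAX_FPR_SIZE); a write that cannot be analyzed, or any access in
   a block that may repeat per va_start, conservatively forces the
   maximum.  */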

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if WRITE_P
   is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape the
   current function.  */
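
/* For a target whose va_list is a structure with separate register save
   area counters (the x86-64 style gp_offset/fp_offset fields are used here
   purely as an illustration), the recognized statements look like

     D.4711 = ap.gp_offset;   <-- VAR = AP, write_p == false
     ap.gp_offset = D.4713;   <-- AP = VAR, write_p == true

   where the accessed FIELD_DECL is compared against
   va_list_gpr_counter_field and va_list_fpr_counter_field, as provided by
   the backend, to decide which size to update.  */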

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */
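
/* On targets whose va_list is a plain void * or char * pointer, the
   gimplified va_arg sequence starts with a copy of the pointer, e.g.
   (an illustrative sketch)

     tem1 = ap;
     tem2 = tem1 + 8;
     ap = tem2;

   This function recognizes the first statement and records TEM in
   si->va_list_escape_vars so that later uses of the temporary can be
   checked.  */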

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
                       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}

/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function-local
   temporary.  */
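
/* For example (illustrative), if ap_temp is already in
   si->va_list_escape_vars, then

     other_temp = ap_temp + 16;

   with other_temp a local SSA name (and the offset from the va_start still
   analyzable) just adds other_temp to the tracked set, whereas storing
   ap_temp or its address into a global or other non-SSA location sets
   si->va_list_escapes.  */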

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars,
                          DECL_UID (SSA_NAME_VAR (rhs))))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars,
                          DECL_UID (SSA_NAME_VAR (TREE_OPERAND
                                                  (TREE_OPERAND (rhs, 0), 0)))))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars,
                  DECL_UID (SSA_NAME_VAR (lhs)));
}

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                     other_ap_temp = ap_temp;
                     other_ap_temp = ap_temp + constant;
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs)))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        DECL_UID (SSA_NAME_VAR (lhs))))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}

/* Entry point to the stdarg optimization pass.  */
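
/* In outline (a reading guide for the code below): the pass first scans all
   basic blocks for __builtin_va_start calls and records the va_list
   variables they initialize; it gives up and saves everything when, for
   instance, a va_list escapes, old style builtins are used, or a
   simple-pointer va_list has more than one va_start.  It then walks every
   statement (and, for simple-pointer va_list, every PHI node) classifying
   counter reads, counter bumps and possible escapes, accumulating the
   number of GPR/FPR save-area bytes actually needed into
   cfun->va_list_gpr_size and cfun->va_list_fpr_size.  */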

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  */
      if (va_list_simple_ptr)
        {
          tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;

          for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
              gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);

              if (!is_gimple_reg (lhs))
                continue;

              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for tem = ap.  */
                      if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                         tem1 = ap;
                         tem2 = tem1 + CST;
                         ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap[0].field = temp.  */
                      if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking the address of a va_list variable
             or passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}

struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",                             /* name */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};