Merge branch 'master' into python
[official-gcc.git] / gcc / tree-stdarg.c
blob8401747a020655c6cd7b8be0fcef88be0e39583f
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "langhooks.h"
29 #include "diagnostic.h"
30 #include "gimple-pretty-print.h"
31 #include "target.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "tree-stdarg.h"
36 /* A simple pass that attempts to optimize stdarg functions on architectures
37 that need to save register arguments to stack on entry to stdarg functions.
38 If the function doesn't use any va_start macros, no registers need to
39 be saved. If va_start macros are used, the va_list variables don't escape
40 the function, it is only necessary to save registers that will be used
41 in va_arg macros. E.g. if va_arg is only used with integral types
42 in the function, floating point registers don't need to be saved, etc. */
45 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
46 is executed at most as many times as VA_START_BB. */
48 static bool
49 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
51 VEC (edge, heap) *stack = NULL;
52 edge e;
53 edge_iterator ei;
54 sbitmap visited;
55 bool ret;
/* Trivially true when both statements live in the same block.  */
57 if (va_arg_bb == va_start_bb)
58 return true;
/* If VA_START_BB does not dominate VA_ARG_BB, some path reaches the
   va_arg without having executed va_start at all.  */
60 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
61 return false;
63 visited = sbitmap_alloc (last_basic_block);
64 sbitmap_zero (visited);
65 ret = true;
/* Backward worklist walk over predecessor edges, starting at
   VA_ARG_BB; every path must terminate at VA_START_BB.  */
67 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
68 VEC_safe_push (edge, heap, stack, e);
70 while (! VEC_empty (edge, stack))
72 basic_block src;
74 e = VEC_pop (edge, stack);
75 src = e->src;
/* Abnormal/EH edges make the execution count unknowable; give up.  */
77 if (e->flags & EDGE_COMPLEX)
79 ret = false;
80 break;
/* Path closed off by the dominating va_start block: OK.  */
83 if (src == va_start_bb)
84 continue;
86 /* va_arg_bb can be executed more times than va_start_bb.  (Reaching
   VA_ARG_BB again on a backward walk means it lies on a cycle.)  */
87 if (src == va_arg_bb)
89 ret = false;
90 break;
/* Dominance guarantees we cannot walk past the function entry.  */
93 gcc_assert (src != ENTRY_BLOCK_PTR);
95 if (! TEST_BIT (visited, src->index))
97 SET_BIT (visited, src->index);
98 FOR_EACH_EDGE (e, ei, src->preds)
99 VEC_safe_push (edge, heap, stack, e);
103 VEC_free (edge, heap, stack);
104 sbitmap_free (visited);
105 return ret;
109 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
110 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
111 GPR_P is true if this is GPR counter. */
113 static unsigned HOST_WIDE_INT
114 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
115 bool gpr_p)
117 tree lhs, orig_lhs;
118 gimple stmt;
119 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
120 unsigned int max_size;
/* Lazily allocate the per-SSA-name offset cache; -1 means "offset not
   yet computed" for that SSA version.  */
122 if (si->offsets == NULL)
124 unsigned int i;
126 si->offsets = XNEWVEC (int, num_ssa_names);
127 for (i = 0; i < num_ssa_names; ++i)
128 si->offsets[i] = -1;
131 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
132 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
133 orig_lhs = lhs = rhs;
/* First pass: walk the SSA use-def chain from RHS back towards
   COUNTER, accumulating constant additions into RET.  */
134 while (lhs)
136 enum tree_code rhs_code;
/* A previously cached offset lets us stop early and reuse it.  */
138 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
140 if (counter_val >= max_size)
142 ret = max_size;
143 break;
146 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
147 break;
150 stmt = SSA_NAME_DEF_STMT (lhs);
152 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
153 return (unsigned HOST_WIDE_INT) -1;
/* Look through plain copies and casts of SSA names.  */
155 rhs_code = gimple_assign_rhs_code (stmt);
156 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
157 || gimple_assign_cast_p (stmt))
158 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
160 lhs = gimple_assign_rhs1 (stmt);
161 continue;
/* Accumulate X + CST (integer or pointer) increments.  */
164 if ((rhs_code == POINTER_PLUS_EXPR
165 || rhs_code == PLUS_EXPR)
166 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
167 && host_integerp (gimple_assign_rhs2 (stmt), 1))
169 ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
170 lhs = gimple_assign_rhs1 (stmt);
171 continue;
174 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
175 return (unsigned HOST_WIDE_INT) -1;
/* The chain must terminate in a load from COUNTER itself: either the
   same COMPONENT_REF field of the same base, or the identical tree.  */
177 rhs = gimple_assign_rhs1 (stmt);
178 if (TREE_CODE (counter) != TREE_CODE (rhs))
179 return (unsigned HOST_WIDE_INT) -1;
181 if (TREE_CODE (counter) == COMPONENT_REF)
183 if (get_base_address (counter) != get_base_address (rhs)
184 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
185 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
186 return (unsigned HOST_WIDE_INT) -1;
188 else if (counter != rhs)
189 return (unsigned HOST_WIDE_INT) -1;
191 lhs = NULL;
/* Second pass: walk the same chain again, recording for each SSA name
   its offset from the counter's initial value (clamped to MAX_SIZE).  */
194 lhs = orig_lhs;
195 val = ret + counter_val;
196 while (lhs)
198 enum tree_code rhs_code;
200 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
201 break;
203 if (val >= max_size)
204 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
205 else
206 si->offsets[SSA_NAME_VERSION (lhs)] = val;
208 stmt = SSA_NAME_DEF_STMT (lhs);
210 rhs_code = gimple_assign_rhs_code (stmt);
211 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
212 || gimple_assign_cast_p (stmt))
213 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
215 lhs = gimple_assign_rhs1 (stmt);
216 continue;
219 if ((rhs_code == POINTER_PLUS_EXPR
220 || rhs_code == PLUS_EXPR)
221 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
222 && host_integerp (gimple_assign_rhs2 (stmt), 1))
/* Undo increments as we walk back: earlier names are closer to the
   counter's starting value.  */
224 val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
225 lhs = gimple_assign_rhs1 (stmt);
226 continue;
229 lhs = NULL;
232 return ret;
236 /* Called by walk_tree to look for references to va_list variables.
    DATA is a walk_stmt_info whose info field is the bitmap of tracked
    va_list variable DECL_UIDs; returns the referenced variable (which
    stops the walk) or NULL_TREE to continue.  */
238 static tree
239 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
240 void *data)
242 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
243 tree var = *tp;
/* Look through SSA names to the underlying declaration.  */
245 if (TREE_CODE (var) == SSA_NAME)
246 var = SSA_NAME_VAR (var);
248 if (TREE_CODE (var) == VAR_DECL
249 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
250 return var;
252 return NULL_TREE;
256 /* Helper function of va_list_counter_struct_op. Compute
257 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
258 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
259 statement. GPR_P is true if AP is a GPR counter, false if it is
260 a FPR counter. */
262 static void
263 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
264 bool write_p)
266 unsigned HOST_WIDE_INT increment;
/* compute_sizes < 0 means "not yet decided for this basic block";
   decide once and cache: sizes can only be computed precisely if the
   block runs at most once per the single va_start.  */
268 if (si->compute_sizes < 0)
270 si->compute_sizes = 0;
271 if (si->va_start_count == 1
272 && reachable_at_most_once (si->bb, si->va_start_bb))
273 si->compute_sizes = 1;
275 if (dump_file && (dump_flags & TDF_DETAILS))
276 fprintf (dump_file,
277 "bb%d will %sbe executed at most once for each va_start "
278 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
279 si->va_start_bb->index);
/* INCREMENT + 1 > 1 is an unsigned idiom: true iff the bump is
   neither 0 nor the (unsigned HOST_WIDE_INT) -1 failure value.  */
282 if (write_p
283 && si->compute_sizes
284 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
286 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
288 cfun->va_list_gpr_size += increment;
289 return;
292 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
294 cfun->va_list_fpr_size += increment;
295 return;
/* Otherwise be conservative and assume all registers of this class
   may be needed.  */
299 if (write_p || !si->compute_sizes)
301 if (gpr_p)
302 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
303 else
304 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
309 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
310 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
311 is false, AP has been seen in VAR = AP assignment.
312 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
313 va_arg operation that doesn't cause the va_list variable to escape
314 current function. */
316 static bool
317 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
318 bool write_p)
320 tree base;
/* AP must be a field reference (counter member of the va_list).  */
322 if (TREE_CODE (ap) != COMPONENT_REF
323 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
324 return false;
/* VAR must be an SSA temporary, not itself a tracked va_list.  */
326 if (TREE_CODE (var) != SSA_NAME
327 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
328 return false;
/* The reference must be into one of the va_list variables we track.  */
330 base = get_base_address (ap);
331 if (TREE_CODE (base) != VAR_DECL
332 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
333 return false;
/* Dispatch on which counter field of the va_list is touched.  */
335 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
336 va_list_counter_op (si, ap, var, true, write_p);
337 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
338 va_list_counter_op (si, ap, var, false, write_p);
340 return true;
344 /* Check for TEM = AP. Return true if found and the caller shouldn't
345 search for va_list references in the statement. */
347 static bool
348 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
/* AP must be one of the tracked va_list variables.  */
350 if (TREE_CODE (ap) != VAR_DECL
351 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
352 return false;
/* TEM must be a function-local SSA temporary, distinct from any
   tracked va_list.  */
354 if (TREE_CODE (tem) != SSA_NAME
355 || bitmap_bit_p (si->va_list_vars,
356 DECL_UID (SSA_NAME_VAR (tem)))
357 || is_global_var (SSA_NAME_VAR (tem)))
358 return false;
/* Lazily decide whether sizes can be computed for this block (same
   caching scheme as in va_list_counter_op).  */
360 if (si->compute_sizes < 0)
362 si->compute_sizes = 0;
363 if (si->va_start_count == 1
364 && reachable_at_most_once (si->bb, si->va_start_bb))
365 si->compute_sizes = 1;
367 if (dump_file && (dump_flags & TDF_DETAILS))
368 fprintf (dump_file,
369 "bb%d will %sbe executed at most once for each va_start "
370 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
371 si->va_start_bb->index);
374 /* For void * or char * va_list types, there is just one counter.
375 If va_arg is used in a loop, we don't know how many registers need
376 saving. */
377 if (! si->compute_sizes)
378 return false;
/* The read must be expressible as AP plus a known constant.  */
380 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
381 return false;
383 /* Note the temporary, as we need to track whether it doesn't escape
384 the current function. */
385 bitmap_set_bit (si->va_list_escape_vars,
386 DECL_UID (SSA_NAME_VAR (tem)));
387 return true;
391 /* Check for:
392 tem1 = AP;
393 TEM2 = tem1 + CST;
394 AP = TEM2;
395 sequence and update cfun->va_list_gpr_size. Return true if found. */
397 static bool
398 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
400 unsigned HOST_WIDE_INT increment;
/* AP must be one of the tracked va_list variables.  */
402 if (TREE_CODE (ap) != VAR_DECL
403 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
404 return false;
406 if (TREE_CODE (tem2) != SSA_NAME
407 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
408 return false;
/* Sizes must have been determined computable for this block.  */
410 if (si->compute_sizes <= 0)
411 return false;
/* increment + 1 <= 1 (unsigned) catches both a zero bump and the
   (unsigned HOST_WIDE_INT) -1 failure value.  */
413 increment = va_list_counter_bump (si, ap, tem2, true);
414 if (increment + 1 <= 1)
415 return false;
/* Grow the GPR save-area size, saturating at the maximum.  */
417 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
418 cfun->va_list_gpr_size += increment;
419 else
420 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
422 return true;
426 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
427 containing value of some va_list variable plus optionally some constant,
428 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
429 depending whether LHS is a function local temporary. */
431 static void
432 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
/* Only pointer-valued RHS can carry a va_list-derived value here.  */
434 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
435 return;
/* RHS must be one of the temporaries already known to hold a
   va_list-derived pointer.  */
437 if (TREE_CODE (rhs) != SSA_NAME
438 || ! bitmap_bit_p (si->va_list_escape_vars,
439 DECL_UID (SSA_NAME_VAR (rhs))))
440 return;
/* Storing into a non-SSA or global destination lets the value leave
   our analysis: treat as an escape.  */
442 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
444 si->va_list_escapes = true;
445 return;
/* Lazily decide whether sizes can be computed for this block (same
   caching scheme as in va_list_counter_op).  */
448 if (si->compute_sizes < 0)
450 si->compute_sizes = 0;
451 if (si->va_start_count == 1
452 && reachable_at_most_once (si->bb, si->va_start_bb))
453 si->compute_sizes = 1;
455 if (dump_file && (dump_flags & TDF_DETAILS))
456 fprintf (dump_file,
457 "bb%d will %sbe executed at most once for each va_start "
458 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
459 si->va_start_bb->index);
462 /* For void * or char * va_list types, there is just one counter.
463 If va_arg is used in a loop, we don't know how many registers need
464 saving. */
465 if (! si->compute_sizes)
467 si->va_list_escapes = true;
468 return;
/* LHS must still be expressible as the va_start AP plus a known
   constant, otherwise we lose track of it.  */
471 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
472 == (unsigned HOST_WIDE_INT) -1)
474 si->va_list_escapes = true;
475 return;
/* LHS is a local temporary holding a va_list-derived value: keep
   tracking it.  */
478 bitmap_set_bit (si->va_list_escape_vars,
479 DECL_UID (SSA_NAME_VAR (lhs)));
483 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
484 Return true if va_list might be escaping. */
486 static bool
487 check_all_va_list_escapes (struct stdarg_info *si)
489 basic_block bb;
/* Scan every statement in the function for uses of the tracked
   temporaries; any use we cannot classify counts as an escape.  */
491 FOR_EACH_BB (bb)
493 gimple_stmt_iterator i;
495 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
497 gimple stmt = gsi_stmt (i);
498 tree use;
499 ssa_op_iter iter;
/* Debug statements never cause a real escape.  */
501 if (is_gimple_debug (stmt))
502 continue;
504 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
506 if (! bitmap_bit_p (si->va_list_escape_vars,
507 DECL_UID (SSA_NAME_VAR (use))))
508 continue;
510 if (is_gimple_assign (stmt))
512 tree rhs = gimple_assign_rhs1 (stmt);
513 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
515 /* x = *ap_temp; */
/* A dereference of a tracked temp with a known offset is the actual
   argument fetch: bump the GPR save-area size accordingly.  */
516 if (gimple_assign_rhs_code (stmt) == INDIRECT_REF
517 && TREE_OPERAND (rhs, 0) == use
518 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
519 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
520 && si->offsets[SSA_NAME_VERSION (use)] != -1)
522 unsigned HOST_WIDE_INT gpr_size;
523 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
525 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
526 + tree_low_cst (access_size, 1);
527 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
528 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
529 else if (gpr_size > cfun->va_list_gpr_size)
530 cfun->va_list_gpr_size = gpr_size;
531 continue;
534 /* va_arg sequences may contain
535 other_ap_temp = ap_temp;
536 other_ap_temp = ap_temp + constant;
537 other_ap_temp = (some_type *) ap_temp;
538 ap = ap_temp;
539 statements. */
540 if (rhs == use
541 && ((rhs_code == POINTER_PLUS_EXPR
542 && (TREE_CODE (gimple_assign_rhs2 (stmt))
543 == INTEGER_CST))
544 || gimple_assign_cast_p (stmt)
545 || (get_gimple_rhs_class (rhs_code)
546 == GIMPLE_SINGLE_RHS)))
548 tree lhs = gimple_assign_lhs (stmt);
/* Copying into another tracked temporary is fine.  */
550 if (TREE_CODE (lhs) == SSA_NAME
551 && bitmap_bit_p (si->va_list_escape_vars,
552 DECL_UID (SSA_NAME_VAR (lhs))))
553 continue;
/* Writing back into a tracked va_list variable is fine too.  */
555 if (TREE_CODE (lhs) == VAR_DECL
556 && bitmap_bit_p (si->va_list_vars,
557 DECL_UID (lhs)))
558 continue;
/* Any other use of a tracked temporary is a potential escape.  */
562 if (dump_file && (dump_flags & TDF_DETAILS))
564 fputs ("va_list escapes in ", dump_file);
565 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
566 fputc ('\n', dump_file);
568 return true;
573 return false;
577 /* Return true if this optimization pass should be done.
578 It makes only sense for stdarg functions. */
580 static bool
581 gate_optimize_stdarg (void)
583 /* This optimization is only for stdarg functions (functions whose
   cfun->stdarg flag is set, i.e. declared with a trailing ...). */
584 return cfun->stdarg != 0;
588 /* Entry point to the stdarg optimization pass.  Computes
    cfun->va_list_gpr_size / cfun->va_list_fpr_size: how many GPR/FPR
    units the function's va_start needs to save.  Falls back to the
    conservative maxima whenever a va_list value may escape.  */
590 static unsigned int
591 execute_optimize_stdarg (void)
593 basic_block bb;
594 bool va_list_escapes = false;
595 bool va_list_simple_ptr;
596 struct stdarg_info si;
597 struct walk_stmt_info wi;
598 const char *funcname = NULL;
599 tree cfun_va_list;
601 cfun->va_list_gpr_size = 0;
602 cfun->va_list_fpr_size = 0;
603 memset (&si, 0, sizeof (si));
604 si.va_list_vars = BITMAP_ALLOC (NULL);
605 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
607 if (dump_file)
608 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
/* Distinguish "simple" va_list ABIs (plain void*/char* pointer) from
   struct-based ones; the analysis differs between the two.  */
610 cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
611 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
612 && (TREE_TYPE (cfun_va_list) == void_type_node
613 || TREE_TYPE (cfun_va_list) == char_type_node);
614 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
/* Pass 1: find all va_start calls and collect the local va_list
   variables they initialize into si.va_list_vars.  */
616 FOR_EACH_BB (bb)
618 gimple_stmt_iterator i;
620 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
622 gimple stmt = gsi_stmt (i);
623 tree callee, ap;
625 if (!is_gimple_call (stmt))
626 continue;
628 callee = gimple_call_fndecl (stmt);
629 if (!callee
630 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
631 continue;
633 switch (DECL_FUNCTION_CODE (callee))
635 case BUILT_IN_VA_START:
636 break;
637 /* If old style builtins are used, don't optimize anything. */
638 case BUILT_IN_SAVEREGS:
639 case BUILT_IN_ARGS_INFO:
640 case BUILT_IN_NEXT_ARG:
641 va_list_escapes = true;
642 continue;
643 default:
644 continue;
647 si.va_start_count++;
648 ap = gimple_call_arg (stmt, 0);
/* The va_start argument must be &local_va_list (possibly through an
   ARRAY_REF index 0 for array-typed va_list ABIs); anything else is
   treated as an escape.  */
650 if (TREE_CODE (ap) != ADDR_EXPR)
652 va_list_escapes = true;
653 break;
655 ap = TREE_OPERAND (ap, 0);
656 if (TREE_CODE (ap) == ARRAY_REF)
658 if (! integer_zerop (TREE_OPERAND (ap, 1)))
660 va_list_escapes = true;
661 break;
663 ap = TREE_OPERAND (ap, 0);
665 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
666 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
667 || TREE_CODE (ap) != VAR_DECL)
669 va_list_escapes = true;
670 break;
673 if (is_global_var (ap))
675 va_list_escapes = true;
676 break;
679 bitmap_set_bit (si.va_list_vars, DECL_UID (ap));
681 /* VA_START_BB and VA_START_AP will be only used if there is just
682 one va_start in the function. */
683 si.va_start_bb = bb;
684 si.va_start_ap = ap;
687 if (va_list_escapes)
688 break;
691 /* If there were no va_start uses in the function, there is no need to
692 save anything. */
693 if (si.va_start_count == 0)
694 goto finish;
696 /* If some va_list arguments weren't local, we can't optimize. */
697 if (va_list_escapes)
698 goto finish;
700 /* For void * or char * va_list, something useful can be done only
701 if there is just one va_start. */
702 if (va_list_simple_ptr && si.va_start_count > 1)
704 va_list_escapes = true;
705 goto finish;
708 /* For struct * va_list, if the backend didn't tell us what the counter fields
709 are, there is nothing more we can do. */
710 if (!va_list_simple_ptr
711 && va_list_gpr_counter_field == NULL_TREE
712 && va_list_fpr_counter_field == NULL_TREE)
714 va_list_escapes = true;
715 goto finish;
718 /* For void * or char * va_list there is just one counter
719 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
720 if (va_list_simple_ptr)
721 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
/* reachable_at_most_once needs dominator info.  */
723 calculate_dominance_info (CDI_DOMINATORS);
724 memset (&wi, 0, sizeof (wi));
725 wi.info = si.va_list_vars;
/* Pass 2: walk all statements, classifying each reference to a
   va_list variable or derived temporary.  */
727 FOR_EACH_BB (bb)
729 gimple_stmt_iterator i;
/* Reset the per-block "sizes computable" cache (-1 = undecided).  */
731 si.compute_sizes = -1;
732 si.bb = bb;
734 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
735 them as assignments for the purpose of escape analysis. This is
736 not needed for non-simple va_list because virtual phis don't perform
737 any real data movement. */
738 if (va_list_simple_ptr)
740 tree lhs, rhs;
741 use_operand_p uop;
742 ssa_op_iter soi;
744 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
746 gimple phi = gsi_stmt (i);
747 lhs = PHI_RESULT (phi);
749 if (!is_gimple_reg (lhs))
750 continue;
752 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
754 rhs = USE_FROM_PTR (uop);
755 if (va_list_ptr_read (&si, rhs, lhs))
756 continue;
757 else if (va_list_ptr_write (&si, lhs, rhs))
758 continue;
759 else
760 check_va_list_escapes (&si, lhs, rhs);
762 if (si.va_list_escapes)
764 if (dump_file && (dump_flags & TDF_DETAILS))
766 fputs ("va_list escapes in ", dump_file);
767 print_gimple_stmt (dump_file, phi, 0, dump_flags);
768 fputc ('\n', dump_file);
770 va_list_escapes = true;
776 for (i = gsi_start_bb (bb);
777 !gsi_end_p (i) && !va_list_escapes;
778 gsi_next (&i))
780 gimple stmt = gsi_stmt (i);
782 /* Don't look at __builtin_va_{start,end}, they are ok. */
783 if (is_gimple_call (stmt))
785 tree callee = gimple_call_fndecl (stmt);
787 if (callee
788 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
789 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
790 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
791 continue;
794 if (is_gimple_assign (stmt))
796 tree lhs = gimple_assign_lhs (stmt);
797 tree rhs = gimple_assign_rhs1 (stmt);
799 if (va_list_simple_ptr)
801 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
802 == GIMPLE_SINGLE_RHS)
804 /* Check for tem = ap. */
805 if (va_list_ptr_read (&si, rhs, lhs))
806 continue;
808 /* Check for the last insn in:
809 tem1 = ap;
810 tem2 = tem1 + CST;
811 ap = tem2;
812 sequence. */
813 else if (va_list_ptr_write (&si, lhs, rhs))
814 continue;
817 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
818 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
819 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
820 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
821 == GIMPLE_SINGLE_RHS))
822 check_va_list_escapes (&si, lhs, rhs);
824 else
826 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
827 == GIMPLE_SINGLE_RHS)
829 /* Check for ap[0].field = temp. */
830 if (va_list_counter_struct_op (&si, lhs, rhs, true))
831 continue;
833 /* Check for temp = ap[0].field. */
834 else if (va_list_counter_struct_op (&si, rhs, lhs,
835 false))
836 continue;
839 /* Do any architecture specific checking. */
840 if (targetm.stdarg_optimize_hook
841 && targetm.stdarg_optimize_hook (&si, stmt))
842 continue;
845 else if (is_gimple_debug (stmt))
846 continue;
848 /* All other uses of va_list are either va_copy (that is not handled
849 in this optimization), taking address of va_list variable or
850 passing va_list to other functions (in that case va_list might
851 escape the function and therefore va_start needs to set it up
852 fully), or some unexpected use of va_list. None of these should
853 happen in a gimplified VA_ARG_EXPR. */
854 if (si.va_list_escapes
855 || walk_gimple_op (stmt, find_va_list_reference, &wi))
857 if (dump_file && (dump_flags & TDF_DETAILS))
859 fputs ("va_list escapes in ", dump_file);
860 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
861 fputc ('\n', dump_file);
863 va_list_escapes = true;
867 if (va_list_escapes)
868 break;
/* For simple-pointer va_lists, finally audit every use of the derived
   temporaries collected above.  */
871 if (! va_list_escapes
872 && va_list_simple_ptr
873 && ! bitmap_empty_p (si.va_list_escape_vars)
874 && check_all_va_list_escapes (&si))
875 va_list_escapes = true;
877 finish:
/* On escape, be conservative: everything must be saved.  */
878 if (va_list_escapes)
880 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
881 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
883 BITMAP_FREE (si.va_list_vars);
884 BITMAP_FREE (si.va_list_escape_vars);
885 free (si.offsets);
886 if (dump_file)
888 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
889 funcname, (int) va_list_escapes);
890 if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
891 fputs ("all", dump_file);
892 else
893 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
894 fputs (" GPR units and ", dump_file);
895 if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
896 fputs ("all", dump_file);
897 else
898 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
899 fputs (" FPR units.\n", dump_file);
901 return 0;
905 struct gimple_opt_pass pass_stdarg =
908 GIMPLE_PASS,
909 "stdarg", /* name */
910 gate_optimize_stdarg, /* gate */
911 execute_optimize_stdarg, /* execute */
912 NULL, /* sub */
913 NULL, /* next */
914 0, /* static_pass_number */
915 TV_NONE, /* tv_id */
916 PROP_cfg | PROP_ssa, /* properties_required */
917 0, /* properties_provided */
918 0, /* properties_destroyed */
919 0, /* todo_flags_start */
920 TODO_dump_func /* todo_flags_finish */