/* Source: gcc/tree-stdarg.c from official-gcc.git,
   blob 883b3b5c6f5dd25800dcd7190fc111676248b1be
   (commit: "Fix gnu11 fallout on SPARC").  */
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "vec.h"
29 #include "machmode.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "langhooks.h"
34 #include "gimple-pretty-print.h"
35 #include "target.h"
36 #include "bitmap.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimple-walk.h"
45 #include "gimple-ssa.h"
46 #include "tree-phinodes.h"
47 #include "ssa-iterators.h"
48 #include "stringpool.h"
49 #include "tree-ssanames.h"
50 #include "sbitmap.h"
51 #include "tree-pass.h"
52 #include "tree-stdarg.h"
54 /* A simple pass that attempts to optimize stdarg functions on architectures
55 that need to save register arguments to stack on entry to stdarg functions.
56 If the function doesn't use any va_start macros, no registers need to
57 be saved. If va_start macros are used, the va_list variables don't escape
58 the function, it is only necessary to save registers that will be used
59 in va_arg macros. E.g. if va_arg is only used with integral types
60 in the function, floating point registers don't need to be saved, etc. */
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  Walks the CFG
   backwards from VA_ARG_BB's predecessors; any abnormal (EH/computed
   goto) edge or a path that re-enters VA_ARG_BB (i.e. a loop around
   the va_arg) disproves the property.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Seed the worklist with all incoming edges of VA_ARG_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal control flow makes the execution count unknowable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* Paths stop at VA_START_BB; it dominates VA_ARG_BB, so every
	 path from entry must pass through it.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.

   Works in two phases over the SSA def chain of RHS: the first loop
   walks backwards through copies/casts/additions accumulating the
   total bump in RET until it reaches COUNTER itself (or a name whose
   offset is already cached in SI->offsets); the second loop re-walks
   the same chain recording each intermediate SSA name's absolute
   counter offset into SI->offsets for later reuse (capped at
   MAX_SIZE).  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache, -1 = unknown.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Hit a name with a cached offset: derive the bump from it.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast of another SSA name: look through it.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = name + CST: accumulate the constant.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM[name + CST]: same thing spelled as an address.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* End of the chain: RHS must be COUNTER itself (same decl, or
	 the same field of the same va_list object).  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: record absolute offsets for every name on the chain
     so later queries can stop early at the cache check above.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
278 /* Called by walk_tree to look for references to va_list variables. */
280 static tree
281 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
282 void *data)
284 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
285 tree var = *tp;
287 if (TREE_CODE (var) == SSA_NAME)
289 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
290 return var;
292 else if (TREE_CODE (var) == VAR_DECL)
294 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
295 return var;
298 return NULL_TREE;
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* compute_sizes < 0 means "not decided for this bb yet"; decide once
     per basic block whether precise size tracking is possible.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* "+ 1 > 1" filters out both a zero bump and the HOST_WIDE_INT_M1U
     failure sentinel in one unsigned comparison.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Couldn't account precisely: a write we don't understand, or a bb
     that may run multiple times, forces the conservative maximum.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* Only handle direct field accesses of a va_list object.  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be a local SSA temporary, not itself a tracked va_list.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Only the two counter fields the backend declared are of interest;
     any other field access is still harmless (returns true).  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  Used for the simple
   (void */char *) va_list representation, where AP itself is the only
   counter.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Decide once per basic block whether precise tracking is possible.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
435 /* Check for:
436 tem1 = AP;
437 TEM2 = tem1 + CST;
438 AP = TEM2;
439 sequence and update cfun->va_list_gpr_size. Return true if found. */
441 static bool
442 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
444 unsigned HOST_WIDE_INT increment;
446 if (TREE_CODE (ap) != VAR_DECL
447 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
448 return false;
450 if (TREE_CODE (tem2) != SSA_NAME
451 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
452 return false;
454 if (si->compute_sizes <= 0)
455 return false;
457 increment = va_list_counter_bump (si, ap, tem2, true);
458 if (increment + 1 <= 1)
459 return false;
461 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
462 cfun->va_list_gpr_size += increment;
463 else
464 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
466 return true;
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only care when RHS (or the pointer inside &MEM[ptr + off]) is one
     of the temporaries already known to hold a va_list value.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Copying the value into anything but an SSA temporary means the
     va_list can be reached from outside our tracking.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Decide once per basic block whether precise tracking is possible.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  Any use of a tracked
   temporary that is not one of the recognized va_arg access shapes
   counts as an escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      /* A PHI merging a tracked temporary into an untracked name is
	 an escape (data flows where we no longer follow it).  */
      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gimple phi = gsi_stmt (i);

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  — a load through the tracked pointer:
		     account the bytes read towards va_list_gpr_size.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Fell through every recognized shape: treat as escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
673 namespace {
/* Pass-manager descriptor for the stdarg optimization pass.  */
const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The stdarg pass object; see the file comment for what it computes.  */
class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* This optimization is only for stdarg functions.  */
      return fun->stdarg != 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
706 unsigned int
707 pass_stdarg::execute (function *fun)
709 basic_block bb;
710 bool va_list_escapes = false;
711 bool va_list_simple_ptr;
712 struct stdarg_info si;
713 struct walk_stmt_info wi;
714 const char *funcname = NULL;
715 tree cfun_va_list;
717 fun->va_list_gpr_size = 0;
718 fun->va_list_fpr_size = 0;
719 memset (&si, 0, sizeof (si));
720 si.va_list_vars = BITMAP_ALLOC (NULL);
721 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
723 if (dump_file)
724 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
726 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
727 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
728 && (TREE_TYPE (cfun_va_list) == void_type_node
729 || TREE_TYPE (cfun_va_list) == char_type_node);
730 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
732 FOR_EACH_BB_FN (bb, fun)
734 gimple_stmt_iterator i;
736 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
738 gimple stmt = gsi_stmt (i);
739 tree callee, ap;
741 if (!is_gimple_call (stmt))
742 continue;
744 callee = gimple_call_fndecl (stmt);
745 if (!callee
746 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
747 continue;
749 switch (DECL_FUNCTION_CODE (callee))
751 case BUILT_IN_VA_START:
752 break;
753 /* If old style builtins are used, don't optimize anything. */
754 case BUILT_IN_SAVEREGS:
755 case BUILT_IN_NEXT_ARG:
756 va_list_escapes = true;
757 continue;
758 default:
759 continue;
762 si.va_start_count++;
763 ap = gimple_call_arg (stmt, 0);
765 if (TREE_CODE (ap) != ADDR_EXPR)
767 va_list_escapes = true;
768 break;
770 ap = TREE_OPERAND (ap, 0);
771 if (TREE_CODE (ap) == ARRAY_REF)
773 if (! integer_zerop (TREE_OPERAND (ap, 1)))
775 va_list_escapes = true;
776 break;
778 ap = TREE_OPERAND (ap, 0);
780 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
781 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
782 || TREE_CODE (ap) != VAR_DECL)
784 va_list_escapes = true;
785 break;
788 if (is_global_var (ap))
790 va_list_escapes = true;
791 break;
794 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
796 /* VA_START_BB and VA_START_AP will be only used if there is just
797 one va_start in the function. */
798 si.va_start_bb = bb;
799 si.va_start_ap = ap;
802 if (va_list_escapes)
803 break;
806 /* If there were no va_start uses in the function, there is no need to
807 save anything. */
808 if (si.va_start_count == 0)
809 goto finish;
811 /* If some va_list arguments weren't local, we can't optimize. */
812 if (va_list_escapes)
813 goto finish;
815 /* For void * or char * va_list, something useful can be done only
816 if there is just one va_start. */
817 if (va_list_simple_ptr && si.va_start_count > 1)
819 va_list_escapes = true;
820 goto finish;
823 /* For struct * va_list, if the backend didn't tell us what the counter fields
824 are, there is nothing more we can do. */
825 if (!va_list_simple_ptr
826 && va_list_gpr_counter_field == NULL_TREE
827 && va_list_fpr_counter_field == NULL_TREE)
829 va_list_escapes = true;
830 goto finish;
833 /* For void * or char * va_list there is just one counter
834 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
835 if (va_list_simple_ptr)
836 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
838 calculate_dominance_info (CDI_DOMINATORS);
839 memset (&wi, 0, sizeof (wi));
840 wi.info = si.va_list_vars;
842 FOR_EACH_BB_FN (bb, fun)
844 gimple_stmt_iterator i;
846 si.compute_sizes = -1;
847 si.bb = bb;
849 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
850 them as assignments for the purpose of escape analysis. This is
851 not needed for non-simple va_list because virtual phis don't perform
852 any real data movement. */
853 if (va_list_simple_ptr)
855 tree lhs, rhs;
856 use_operand_p uop;
857 ssa_op_iter soi;
859 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
861 gimple phi = gsi_stmt (i);
862 lhs = PHI_RESULT (phi);
864 if (virtual_operand_p (lhs))
865 continue;
867 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
869 rhs = USE_FROM_PTR (uop);
870 if (va_list_ptr_read (&si, rhs, lhs))
871 continue;
872 else if (va_list_ptr_write (&si, lhs, rhs))
873 continue;
874 else
875 check_va_list_escapes (&si, lhs, rhs);
877 if (si.va_list_escapes)
879 if (dump_file && (dump_flags & TDF_DETAILS))
881 fputs ("va_list escapes in ", dump_file);
882 print_gimple_stmt (dump_file, phi, 0, dump_flags);
883 fputc ('\n', dump_file);
885 va_list_escapes = true;
891 for (i = gsi_start_bb (bb);
892 !gsi_end_p (i) && !va_list_escapes;
893 gsi_next (&i))
895 gimple stmt = gsi_stmt (i);
897 /* Don't look at __builtin_va_{start,end}, they are ok. */
898 if (is_gimple_call (stmt))
900 tree callee = gimple_call_fndecl (stmt);
902 if (callee
903 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
904 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
905 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
906 continue;
909 if (is_gimple_assign (stmt))
911 tree lhs = gimple_assign_lhs (stmt);
912 tree rhs = gimple_assign_rhs1 (stmt);
914 if (va_list_simple_ptr)
916 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
917 == GIMPLE_SINGLE_RHS)
919 /* Check for ap ={v} {}. */
920 if (TREE_CLOBBER_P (rhs))
921 continue;
923 /* Check for tem = ap. */
924 else if (va_list_ptr_read (&si, rhs, lhs))
925 continue;
927 /* Check for the last insn in:
928 tem1 = ap;
929 tem2 = tem1 + CST;
930 ap = tem2;
931 sequence. */
932 else if (va_list_ptr_write (&si, lhs, rhs))
933 continue;
936 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
937 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
938 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
939 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
940 == GIMPLE_SINGLE_RHS))
941 check_va_list_escapes (&si, lhs, rhs);
943 else
945 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
946 == GIMPLE_SINGLE_RHS)
948 /* Check for ap ={v} {}. */
949 if (TREE_CLOBBER_P (rhs))
950 continue;
952 /* Check for ap[0].field = temp. */
953 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
954 continue;
956 /* Check for temp = ap[0].field. */
957 else if (va_list_counter_struct_op (&si, rhs, lhs,
958 false))
959 continue;
962 /* Do any architecture specific checking. */
963 if (targetm.stdarg_optimize_hook
964 && targetm.stdarg_optimize_hook (&si, stmt))
965 continue;
968 else if (is_gimple_debug (stmt))
969 continue;
971 /* All other uses of va_list are either va_copy (that is not handled
972 in this optimization), taking address of va_list variable or
973 passing va_list to other functions (in that case va_list might
974 escape the function and therefore va_start needs to set it up
975 fully), or some unexpected use of va_list. None of these should
976 happen in a gimplified VA_ARG_EXPR. */
977 if (si.va_list_escapes
978 || walk_gimple_op (stmt, find_va_list_reference, &wi))
980 if (dump_file && (dump_flags & TDF_DETAILS))
982 fputs ("va_list escapes in ", dump_file);
983 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
984 fputc ('\n', dump_file);
986 va_list_escapes = true;
990 if (va_list_escapes)
991 break;
994 if (! va_list_escapes
995 && va_list_simple_ptr
996 && ! bitmap_empty_p (si.va_list_escape_vars)
997 && check_all_va_list_escapes (&si))
998 va_list_escapes = true;
1000 finish:
1001 if (va_list_escapes)
1003 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1004 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1006 BITMAP_FREE (si.va_list_vars);
1007 BITMAP_FREE (si.va_list_escape_vars);
1008 free (si.offsets);
1009 if (dump_file)
1011 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1012 funcname, (int) va_list_escapes);
1013 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1014 fputs ("all", dump_file);
1015 else
1016 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1017 fputs (" GPR units and ", dump_file);
1018 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1019 fputs ("all", dump_file);
1020 else
1021 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1022 fputs (" FPR units.\n", dump_file);
1024 return 0;
1027 } // anon namespace
/* Factory called by the pass manager: allocate a fresh stdarg pass
   instance in context CTXT.  The pass manager owns the result.  */
gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}