/* tree-stdarg.c — from official-gcc.git, blob
   f3f41349f8421c683d9db53354d6b720633a98d7.  (The "gcc/ada/" breadcrumb in
   the original listing was a web-viewer navigation artifact; this file lives
   at gcc/tree-stdarg.c.)  */
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "hard-reg-set.h"
30 #include "function.h"
31 #include "langhooks.h"
32 #include "gimple-pretty-print.h"
33 #include "target.h"
34 #include "bitmap.h"
35 #include "predict.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimple-walk.h"
45 #include "gimple-ssa.h"
46 #include "gimplify.h"
47 #include "tree-phinodes.h"
48 #include "ssa-iterators.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "tree-into-ssa.h"
52 #include "sbitmap.h"
53 #include "tree-cfg.h"
54 #include "tree-pass.h"
55 #include "tree-stdarg.h"
57 /* A simple pass that attempts to optimize stdarg functions on architectures
58 that need to save register arguments to stack on entry to stdarg functions.
59 If the function doesn't use any va_start macros, no registers need to
60 be saved. If va_start macros are used, the va_list variables don't escape
61 the function, it is only necessary to save registers that will be used
62 in va_arg macros. E.g. if va_arg is only used with integral types
63 in the function, floating point registers don't need to be saved, etc. */
66 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
67 is executed at most as many times as VA_START_BB. */
69 static bool
70 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
72 vec<edge> stack = vNULL;
73 edge e;
74 edge_iterator ei;
75 sbitmap visited;
76 bool ret;
78 if (va_arg_bb == va_start_bb)
79 return true;
81 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
82 return false;
84 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
85 bitmap_clear (visited);
86 ret = true;
88 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
89 stack.safe_push (e);
91 while (! stack.is_empty ())
93 basic_block src;
95 e = stack.pop ();
96 src = e->src;
98 if (e->flags & EDGE_COMPLEX)
100 ret = false;
101 break;
104 if (src == va_start_bb)
105 continue;
107 /* va_arg_bb can be executed more times than va_start_bb. */
108 if (src == va_arg_bb)
110 ret = false;
111 break;
114 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
116 if (! bitmap_bit_p (visited, src->index))
118 bitmap_set_bit (visited, src->index);
119 FOR_EACH_EDGE (e, ei, src->preds)
120 stack.safe_push (e);
124 stack.release ();
125 sbitmap_free (visited);
126 return ret;
130 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
131 return constant, otherwise return HOST_WIDE_INT_M1U.
132 GPR_P is true if this is GPR counter. */
134 static unsigned HOST_WIDE_INT
135 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
136 bool gpr_p)
138 tree lhs, orig_lhs;
139 gimple stmt;
140 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
141 unsigned int max_size;
143 if (si->offsets == NULL)
145 unsigned int i;
147 si->offsets = XNEWVEC (int, num_ssa_names);
148 for (i = 0; i < num_ssa_names; ++i)
149 si->offsets[i] = -1;
152 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
153 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
154 orig_lhs = lhs = rhs;
155 while (lhs)
157 enum tree_code rhs_code;
158 tree rhs1;
160 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
162 if (counter_val >= max_size)
164 ret = max_size;
165 break;
168 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
169 break;
172 stmt = SSA_NAME_DEF_STMT (lhs);
174 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
175 return HOST_WIDE_INT_M1U;
177 rhs_code = gimple_assign_rhs_code (stmt);
178 rhs1 = gimple_assign_rhs1 (stmt);
179 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
180 || gimple_assign_cast_p (stmt))
181 && TREE_CODE (rhs1) == SSA_NAME)
183 lhs = rhs1;
184 continue;
187 if ((rhs_code == POINTER_PLUS_EXPR
188 || rhs_code == PLUS_EXPR)
189 && TREE_CODE (rhs1) == SSA_NAME
190 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
192 ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
193 lhs = rhs1;
194 continue;
197 if (rhs_code == ADDR_EXPR
198 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
199 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
200 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
202 ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
203 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
204 continue;
207 if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
208 return HOST_WIDE_INT_M1U;
210 rhs = gimple_assign_rhs1 (stmt);
211 if (TREE_CODE (counter) != TREE_CODE (rhs))
212 return HOST_WIDE_INT_M1U;
214 if (TREE_CODE (counter) == COMPONENT_REF)
216 if (get_base_address (counter) != get_base_address (rhs)
217 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
218 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
219 return HOST_WIDE_INT_M1U;
221 else if (counter != rhs)
222 return HOST_WIDE_INT_M1U;
224 lhs = NULL;
227 lhs = orig_lhs;
228 val = ret + counter_val;
229 while (lhs)
231 enum tree_code rhs_code;
232 tree rhs1;
234 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
235 break;
237 if (val >= max_size)
238 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
239 else
240 si->offsets[SSA_NAME_VERSION (lhs)] = val;
242 stmt = SSA_NAME_DEF_STMT (lhs);
244 rhs_code = gimple_assign_rhs_code (stmt);
245 rhs1 = gimple_assign_rhs1 (stmt);
246 if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
247 || gimple_assign_cast_p (stmt))
248 && TREE_CODE (rhs1) == SSA_NAME)
250 lhs = rhs1;
251 continue;
254 if ((rhs_code == POINTER_PLUS_EXPR
255 || rhs_code == PLUS_EXPR)
256 && TREE_CODE (rhs1) == SSA_NAME
257 && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
259 val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
260 lhs = rhs1;
261 continue;
264 if (rhs_code == ADDR_EXPR
265 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
266 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
267 && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
269 val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
270 lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
271 continue;
274 lhs = NULL;
277 return ret;
281 /* Called by walk_tree to look for references to va_list variables. */
283 static tree
284 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
285 void *data)
287 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
288 tree var = *tp;
290 if (TREE_CODE (var) == SSA_NAME)
292 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
293 return var;
295 else if (TREE_CODE (var) == VAR_DECL)
297 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
298 return var;
301 return NULL_TREE;
305 /* Helper function of va_list_counter_struct_op. Compute
306 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
307 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
308 statement. GPR_P is true if AP is a GPR counter, false if it is
309 a FPR counter. */
311 static void
312 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
313 bool write_p)
315 unsigned HOST_WIDE_INT increment;
317 if (si->compute_sizes < 0)
319 si->compute_sizes = 0;
320 if (si->va_start_count == 1
321 && reachable_at_most_once (si->bb, si->va_start_bb))
322 si->compute_sizes = 1;
324 if (dump_file && (dump_flags & TDF_DETAILS))
325 fprintf (dump_file,
326 "bb%d will %sbe executed at most once for each va_start "
327 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
328 si->va_start_bb->index);
331 if (write_p
332 && si->compute_sizes
333 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
335 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
337 cfun->va_list_gpr_size += increment;
338 return;
341 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
343 cfun->va_list_fpr_size += increment;
344 return;
348 if (write_p || !si->compute_sizes)
350 if (gpr_p)
351 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
352 else
353 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
358 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
359 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
360 is false, AP has been seen in VAR = AP assignment.
361 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
362 va_arg operation that doesn't cause the va_list variable to escape
363 current function. */
365 static bool
366 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
367 bool write_p)
369 tree base;
371 if (TREE_CODE (ap) != COMPONENT_REF
372 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
373 return false;
375 if (TREE_CODE (var) != SSA_NAME
376 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
377 return false;
379 base = get_base_address (ap);
380 if (TREE_CODE (base) != VAR_DECL
381 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
382 return false;
384 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
385 va_list_counter_op (si, ap, var, true, write_p);
386 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
387 va_list_counter_op (si, ap, var, false, write_p);
389 return true;
393 /* Check for TEM = AP. Return true if found and the caller shouldn't
394 search for va_list references in the statement. */
396 static bool
397 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
399 if (TREE_CODE (ap) != VAR_DECL
400 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
401 return false;
403 if (TREE_CODE (tem) != SSA_NAME
404 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
405 return false;
407 if (si->compute_sizes < 0)
409 si->compute_sizes = 0;
410 if (si->va_start_count == 1
411 && reachable_at_most_once (si->bb, si->va_start_bb))
412 si->compute_sizes = 1;
414 if (dump_file && (dump_flags & TDF_DETAILS))
415 fprintf (dump_file,
416 "bb%d will %sbe executed at most once for each va_start "
417 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
418 si->va_start_bb->index);
421 /* For void * or char * va_list types, there is just one counter.
422 If va_arg is used in a loop, we don't know how many registers need
423 saving. */
424 if (! si->compute_sizes)
425 return false;
427 if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
428 return false;
430 /* Note the temporary, as we need to track whether it doesn't escape
431 the current function. */
432 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
434 return true;
438 /* Check for:
439 tem1 = AP;
440 TEM2 = tem1 + CST;
441 AP = TEM2;
442 sequence and update cfun->va_list_gpr_size. Return true if found. */
444 static bool
445 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
447 unsigned HOST_WIDE_INT increment;
449 if (TREE_CODE (ap) != VAR_DECL
450 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
451 return false;
453 if (TREE_CODE (tem2) != SSA_NAME
454 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
455 return false;
457 if (si->compute_sizes <= 0)
458 return false;
460 increment = va_list_counter_bump (si, ap, tem2, true);
461 if (increment + 1 <= 1)
462 return false;
464 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
465 cfun->va_list_gpr_size += increment;
466 else
467 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
469 return true;
473 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
474 containing value of some va_list variable plus optionally some constant,
475 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
476 depending whether LHS is a function local temporary. */
478 static void
479 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
481 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
482 return;
484 if (TREE_CODE (rhs) == SSA_NAME)
486 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
487 return;
489 else if (TREE_CODE (rhs) == ADDR_EXPR
490 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
491 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
493 tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
494 if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
495 return;
497 else
498 return;
500 if (TREE_CODE (lhs) != SSA_NAME)
502 si->va_list_escapes = true;
503 return;
506 if (si->compute_sizes < 0)
508 si->compute_sizes = 0;
509 if (si->va_start_count == 1
510 && reachable_at_most_once (si->bb, si->va_start_bb))
511 si->compute_sizes = 1;
513 if (dump_file && (dump_flags & TDF_DETAILS))
514 fprintf (dump_file,
515 "bb%d will %sbe executed at most once for each va_start "
516 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
517 si->va_start_bb->index);
520 /* For void * or char * va_list types, there is just one counter.
521 If va_arg is used in a loop, we don't know how many registers need
522 saving. */
523 if (! si->compute_sizes)
525 si->va_list_escapes = true;
526 return;
529 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
530 == HOST_WIDE_INT_M1U)
532 si->va_list_escapes = true;
533 return;
536 bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
540 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
541 Return true if va_list might be escaping. */
543 static bool
544 check_all_va_list_escapes (struct stdarg_info *si)
546 basic_block bb;
548 FOR_EACH_BB_FN (bb, cfun)
550 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
551 gsi_next (&i))
553 tree lhs;
554 use_operand_p uop;
555 ssa_op_iter soi;
556 gphi *phi = i.phi ();
558 lhs = PHI_RESULT (phi);
559 if (virtual_operand_p (lhs)
560 || bitmap_bit_p (si->va_list_escape_vars,
561 SSA_NAME_VERSION (lhs)))
562 continue;
564 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
566 tree rhs = USE_FROM_PTR (uop);
567 if (TREE_CODE (rhs) == SSA_NAME
568 && bitmap_bit_p (si->va_list_escape_vars,
569 SSA_NAME_VERSION (rhs)))
571 if (dump_file && (dump_flags & TDF_DETAILS))
573 fputs ("va_list escapes in ", dump_file);
574 print_gimple_stmt (dump_file, phi, 0, dump_flags);
575 fputc ('\n', dump_file);
577 return true;
582 for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
583 gsi_next (&i))
585 gimple stmt = gsi_stmt (i);
586 tree use;
587 ssa_op_iter iter;
589 if (is_gimple_debug (stmt))
590 continue;
592 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
594 if (! bitmap_bit_p (si->va_list_escape_vars,
595 SSA_NAME_VERSION (use)))
596 continue;
598 if (is_gimple_assign (stmt))
600 tree rhs = gimple_assign_rhs1 (stmt);
601 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
603 /* x = *ap_temp; */
604 if (rhs_code == MEM_REF
605 && TREE_OPERAND (rhs, 0) == use
606 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
607 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
608 && si->offsets[SSA_NAME_VERSION (use)] != -1)
610 unsigned HOST_WIDE_INT gpr_size;
611 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
613 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
614 + tree_to_shwi (TREE_OPERAND (rhs, 1))
615 + tree_to_uhwi (access_size);
616 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
617 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
618 else if (gpr_size > cfun->va_list_gpr_size)
619 cfun->va_list_gpr_size = gpr_size;
620 continue;
623 /* va_arg sequences may contain
624 other_ap_temp = ap_temp;
625 other_ap_temp = ap_temp + constant;
626 other_ap_temp = (some_type *) ap_temp;
627 ap = ap_temp;
628 statements. */
629 if (rhs == use
630 && ((rhs_code == POINTER_PLUS_EXPR
631 && (TREE_CODE (gimple_assign_rhs2 (stmt))
632 == INTEGER_CST))
633 || gimple_assign_cast_p (stmt)
634 || (get_gimple_rhs_class (rhs_code)
635 == GIMPLE_SINGLE_RHS)))
637 tree lhs = gimple_assign_lhs (stmt);
639 if (TREE_CODE (lhs) == SSA_NAME
640 && bitmap_bit_p (si->va_list_escape_vars,
641 SSA_NAME_VERSION (lhs)))
642 continue;
644 if (TREE_CODE (lhs) == VAR_DECL
645 && bitmap_bit_p (si->va_list_vars,
646 DECL_UID (lhs) + num_ssa_names))
647 continue;
649 else if (rhs_code == ADDR_EXPR
650 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
651 && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
653 tree lhs = gimple_assign_lhs (stmt);
655 if (bitmap_bit_p (si->va_list_escape_vars,
656 SSA_NAME_VERSION (lhs)))
657 continue;
661 if (dump_file && (dump_flags & TDF_DETAILS))
663 fputs ("va_list escapes in ", dump_file);
664 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
665 fputc ('\n', dump_file);
667 return true;
672 return false;
675 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
677 static void
678 optimize_va_list_gpr_fpr_size (function *fun)
680 basic_block bb;
681 bool va_list_escapes = false;
682 bool va_list_simple_ptr;
683 struct stdarg_info si;
684 struct walk_stmt_info wi;
685 const char *funcname = NULL;
686 tree cfun_va_list;
688 fun->va_list_gpr_size = 0;
689 fun->va_list_fpr_size = 0;
690 memset (&si, 0, sizeof (si));
691 si.va_list_vars = BITMAP_ALLOC (NULL);
692 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
694 if (dump_file)
695 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
697 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
698 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
699 && (TREE_TYPE (cfun_va_list) == void_type_node
700 || TREE_TYPE (cfun_va_list) == char_type_node);
701 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
703 FOR_EACH_BB_FN (bb, fun)
705 gimple_stmt_iterator i;
707 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
709 gimple stmt = gsi_stmt (i);
710 tree callee, ap;
712 if (!is_gimple_call (stmt))
713 continue;
715 callee = gimple_call_fndecl (stmt);
716 if (!callee
717 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
718 continue;
720 switch (DECL_FUNCTION_CODE (callee))
722 case BUILT_IN_VA_START:
723 break;
724 /* If old style builtins are used, don't optimize anything. */
725 case BUILT_IN_SAVEREGS:
726 case BUILT_IN_NEXT_ARG:
727 va_list_escapes = true;
728 continue;
729 default:
730 continue;
733 si.va_start_count++;
734 ap = gimple_call_arg (stmt, 0);
736 if (TREE_CODE (ap) != ADDR_EXPR)
738 va_list_escapes = true;
739 break;
741 ap = TREE_OPERAND (ap, 0);
742 if (TREE_CODE (ap) == ARRAY_REF)
744 if (! integer_zerop (TREE_OPERAND (ap, 1)))
746 va_list_escapes = true;
747 break;
749 ap = TREE_OPERAND (ap, 0);
751 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
752 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
753 || TREE_CODE (ap) != VAR_DECL)
755 va_list_escapes = true;
756 break;
759 if (is_global_var (ap))
761 va_list_escapes = true;
762 break;
765 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
767 /* VA_START_BB and VA_START_AP will be only used if there is just
768 one va_start in the function. */
769 si.va_start_bb = bb;
770 si.va_start_ap = ap;
773 if (va_list_escapes)
774 break;
777 /* If there were no va_start uses in the function, there is no need to
778 save anything. */
779 if (si.va_start_count == 0)
780 goto finish;
782 /* If some va_list arguments weren't local, we can't optimize. */
783 if (va_list_escapes)
784 goto finish;
786 /* For void * or char * va_list, something useful can be done only
787 if there is just one va_start. */
788 if (va_list_simple_ptr && si.va_start_count > 1)
790 va_list_escapes = true;
791 goto finish;
794 /* For struct * va_list, if the backend didn't tell us what the counter fields
795 are, there is nothing more we can do. */
796 if (!va_list_simple_ptr
797 && va_list_gpr_counter_field == NULL_TREE
798 && va_list_fpr_counter_field == NULL_TREE)
800 va_list_escapes = true;
801 goto finish;
804 /* For void * or char * va_list there is just one counter
805 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
806 if (va_list_simple_ptr)
807 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
809 calculate_dominance_info (CDI_DOMINATORS);
810 memset (&wi, 0, sizeof (wi));
811 wi.info = si.va_list_vars;
813 FOR_EACH_BB_FN (bb, fun)
815 si.compute_sizes = -1;
816 si.bb = bb;
818 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
819 them as assignments for the purpose of escape analysis. This is
820 not needed for non-simple va_list because virtual phis don't perform
821 any real data movement. Also, check PHI nodes for taking address of
822 the va_list vars. */
823 tree lhs, rhs;
824 use_operand_p uop;
825 ssa_op_iter soi;
827 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
828 gsi_next (&i))
830 gphi *phi = i.phi ();
831 lhs = PHI_RESULT (phi);
833 if (virtual_operand_p (lhs))
834 continue;
836 if (va_list_simple_ptr)
838 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
840 rhs = USE_FROM_PTR (uop);
841 if (va_list_ptr_read (&si, rhs, lhs))
842 continue;
843 else if (va_list_ptr_write (&si, lhs, rhs))
844 continue;
845 else
846 check_va_list_escapes (&si, lhs, rhs);
848 if (si.va_list_escapes)
850 if (dump_file && (dump_flags & TDF_DETAILS))
852 fputs ("va_list escapes in ", dump_file);
853 print_gimple_stmt (dump_file, phi, 0, dump_flags);
854 fputc ('\n', dump_file);
856 va_list_escapes = true;
861 for (unsigned j = 0; !va_list_escapes
862 && j < gimple_phi_num_args (phi); ++j)
863 if ((!va_list_simple_ptr
864 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
865 && walk_tree (gimple_phi_arg_def_ptr (phi, j),
866 find_va_list_reference, &wi, NULL))
868 if (dump_file && (dump_flags & TDF_DETAILS))
870 fputs ("va_list escapes in ", dump_file);
871 print_gimple_stmt (dump_file, phi, 0, dump_flags);
872 fputc ('\n', dump_file);
874 va_list_escapes = true;
878 for (gimple_stmt_iterator i = gsi_start_bb (bb);
879 !gsi_end_p (i) && !va_list_escapes;
880 gsi_next (&i))
882 gimple stmt = gsi_stmt (i);
884 /* Don't look at __builtin_va_{start,end}, they are ok. */
885 if (is_gimple_call (stmt))
887 tree callee = gimple_call_fndecl (stmt);
889 if (callee
890 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
891 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
892 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
893 continue;
896 if (is_gimple_assign (stmt))
898 lhs = gimple_assign_lhs (stmt);
899 rhs = gimple_assign_rhs1 (stmt);
901 if (va_list_simple_ptr)
903 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
904 == GIMPLE_SINGLE_RHS)
906 /* Check for ap ={v} {}. */
907 if (TREE_CLOBBER_P (rhs))
908 continue;
910 /* Check for tem = ap. */
911 else if (va_list_ptr_read (&si, rhs, lhs))
912 continue;
914 /* Check for the last insn in:
915 tem1 = ap;
916 tem2 = tem1 + CST;
917 ap = tem2;
918 sequence. */
919 else if (va_list_ptr_write (&si, lhs, rhs))
920 continue;
923 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
924 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
925 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
926 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
927 == GIMPLE_SINGLE_RHS))
928 check_va_list_escapes (&si, lhs, rhs);
930 else
932 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
933 == GIMPLE_SINGLE_RHS)
935 /* Check for ap ={v} {}. */
936 if (TREE_CLOBBER_P (rhs))
937 continue;
939 /* Check for ap[0].field = temp. */
940 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
941 continue;
943 /* Check for temp = ap[0].field. */
944 else if (va_list_counter_struct_op (&si, rhs, lhs,
945 false))
946 continue;
949 /* Do any architecture specific checking. */
950 if (targetm.stdarg_optimize_hook
951 && targetm.stdarg_optimize_hook (&si, stmt))
952 continue;
955 else if (is_gimple_debug (stmt))
956 continue;
958 /* All other uses of va_list are either va_copy (that is not handled
959 in this optimization), taking address of va_list variable or
960 passing va_list to other functions (in that case va_list might
961 escape the function and therefore va_start needs to set it up
962 fully), or some unexpected use of va_list. None of these should
963 happen in a gimplified VA_ARG_EXPR. */
964 if (si.va_list_escapes
965 || walk_gimple_op (stmt, find_va_list_reference, &wi))
967 if (dump_file && (dump_flags & TDF_DETAILS))
969 fputs ("va_list escapes in ", dump_file);
970 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
971 fputc ('\n', dump_file);
973 va_list_escapes = true;
977 if (va_list_escapes)
978 break;
981 if (! va_list_escapes
982 && va_list_simple_ptr
983 && ! bitmap_empty_p (si.va_list_escape_vars)
984 && check_all_va_list_escapes (&si))
985 va_list_escapes = true;
987 finish:
988 if (va_list_escapes)
990 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
991 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
993 BITMAP_FREE (si.va_list_vars);
994 BITMAP_FREE (si.va_list_escape_vars);
995 free (si.offsets);
996 if (dump_file)
998 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
999 funcname, (int) va_list_escapes);
1000 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1001 fputs ("all", dump_file);
1002 else
1003 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1004 fputs (" GPR units and ", dump_file);
1005 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1006 fputs ("all", dump_file);
1007 else
1008 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1009 fputs (" FPR units.\n", dump_file);
1013 /* Return true if STMT is IFN_VA_ARG. */
1015 static bool
1016 gimple_call_ifn_va_arg_p (gimple stmt)
1018 return (is_gimple_call (stmt)
1019 && gimple_call_internal_p (stmt)
1020 && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
1023 /* Expand IFN_VA_ARGs in FUN. */
1025 static void
1026 expand_ifn_va_arg_1 (function *fun)
1028 bool modified = false;
1029 basic_block bb;
1030 gimple_stmt_iterator i;
1031 location_t saved_location;
1033 FOR_EACH_BB_FN (bb, fun)
1034 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1036 gimple stmt = gsi_stmt (i);
1037 tree ap, expr, lhs, type;
1038 gimple_seq pre = NULL, post = NULL;
1040 if (!gimple_call_ifn_va_arg_p (stmt))
1041 continue;
1043 modified = true;
1045 type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
1046 ap = gimple_call_arg (stmt, 0);
1048 /* Balanced out the &ap, usually added by build_va_arg. */
1049 ap = build_fold_indirect_ref (ap);
1051 push_gimplify_context (false);
1052 saved_location = input_location;
1053 input_location = gimple_location (stmt);
1055 /* Make it easier for the backends by protecting the valist argument
1056 from multiple evaluations. */
1057 gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);
1059 expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);
1061 lhs = gimple_call_lhs (stmt);
1062 if (lhs != NULL_TREE)
1064 unsigned int nargs = gimple_call_num_args (stmt);
1065 gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));
1067 if (nargs == 3)
1069 /* We've transported the size of with WITH_SIZE_EXPR here as
1070 the last argument of the internal fn call. Now reinstate
1071 it. */
1072 tree size = gimple_call_arg (stmt, nargs - 1);
1073 expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
1076 /* We use gimplify_assign here, rather than gimple_build_assign,
1077 because gimple_assign knows how to deal with variable-sized
1078 types. */
1079 gimplify_assign (lhs, expr, &pre);
1081 else
1082 gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);
1084 input_location = saved_location;
1085 pop_gimplify_context (NULL);
1087 gimple_seq_add_seq (&pre, post);
1088 update_modified_stmts (pre);
1090 /* Add the sequence after IFN_VA_ARG. This splits the bb right
1091 after IFN_VA_ARG, and adds the sequence in one or more new bbs
1092 inbetween. */
1093 gimple_find_sub_bbs (pre, &i);
1095 /* Remove the IFN_VA_ARG gimple_call. It's the last stmt in the
1096 bb. */
1097 gsi_remove (&i, true);
1098 gcc_assert (gsi_end_p (i));
1100 /* We're walking here into the bbs which contain the expansion of
1101 IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
1102 expanding. We could try to skip walking these bbs, perhaps by
1103 walking backwards over gimples and bbs. */
1104 break;
1107 if (!modified)
1108 return;
1110 free_dominance_info (CDI_DOMINATORS);
1111 update_ssa (TODO_update_ssa);
1114 /* Expand IFN_VA_ARGs in FUN, if necessary. */
1116 static void
1117 expand_ifn_va_arg (function *fun)
1119 if ((fun->curr_properties & PROP_gimple_lva) == 0)
1120 expand_ifn_va_arg_1 (fun);
1122 #if ENABLE_CHECKING
1123 basic_block bb;
1124 gimple_stmt_iterator i;
1125 FOR_EACH_BB_FN (bb, fun)
1126 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1127 gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
1128 #endif
1131 namespace {
1133 const pass_data pass_data_stdarg =
1135 GIMPLE_PASS, /* type */
1136 "stdarg", /* name */
1137 OPTGROUP_NONE, /* optinfo_flags */
1138 TV_NONE, /* tv_id */
1139 ( PROP_cfg | PROP_ssa ), /* properties_required */
1140 PROP_gimple_lva, /* properties_provided */
1141 0, /* properties_destroyed */
1142 0, /* todo_flags_start */
1143 0, /* todo_flags_finish */
1146 class pass_stdarg : public gimple_opt_pass
1148 public:
1149 pass_stdarg (gcc::context *ctxt)
1150 : gimple_opt_pass (pass_data_stdarg, ctxt)
1153 /* opt_pass methods: */
1154 virtual bool gate (function *)
1156 /* Always run this pass, in order to expand va_arg internal_fns. We
1157 also need to do that if fun->stdarg == 0, because a va_arg may also
1158 occur in a function without varargs, f.i. if when passing a va_list to
1159 another function. */
1160 return true;
1163 virtual unsigned int execute (function *);
1165 }; // class pass_stdarg
1167 unsigned int
1168 pass_stdarg::execute (function *fun)
1170 /* TODO: Postpone expand_ifn_va_arg till after
1171 optimize_va_list_gpr_fpr_size. */
1172 expand_ifn_va_arg (fun);
1174 if (flag_stdarg_opt
1175 /* This optimization is only for stdarg functions. */
1176 && fun->stdarg != 0)
1177 optimize_va_list_gpr_fpr_size (fun);
1179 return 0;
1182 } // anon namespace
1184 gimple_opt_pass *
1185 make_pass_stdarg (gcc::context *ctxt)
1187 return new pass_stdarg (ctxt);
1190 namespace {
1192 const pass_data pass_data_lower_vaarg =
1194 GIMPLE_PASS, /* type */
1195 "lower_vaarg", /* name */
1196 OPTGROUP_NONE, /* optinfo_flags */
1197 TV_NONE, /* tv_id */
1198 ( PROP_cfg | PROP_ssa ), /* properties_required */
1199 PROP_gimple_lva, /* properties_provided */
1200 0, /* properties_destroyed */
1201 0, /* todo_flags_start */
1202 0, /* todo_flags_finish */
1205 class pass_lower_vaarg : public gimple_opt_pass
1207 public:
1208 pass_lower_vaarg (gcc::context *ctxt)
1209 : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
1212 /* opt_pass methods: */
1213 virtual bool gate (function *)
1215 return (cfun->curr_properties & PROP_gimple_lva) == 0;
1218 virtual unsigned int execute (function *);
1220 }; // class pass_lower_vaarg
1222 unsigned int
1223 pass_lower_vaarg::execute (function *fun)
1225 expand_ifn_va_arg (fun);
1226 return 0;
1229 } // anon namespace
1231 gimple_opt_pass *
1232 make_pass_lower_vaarg (gcc::context *ctxt)
1234 return new pass_lower_vaarg (ctxt);