PR rtl-optimization/82913
[official-gcc.git] / gcc / tree-stdarg.c
blob923b315e79e17d008ffff9370394a68efad0f0e1
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "gimple-pretty-print.h"
31 #include "fold-const.h"
32 #include "langhooks.h"
33 #include "gimple-iterator.h"
34 #include "gimple-walk.h"
35 #include "gimplify.h"
36 #include "tree-into-ssa.h"
37 #include "tree-cfg.h"
38 #include "tree-stdarg.h"
39 #include "tree-chkp.h"
41 /* A simple pass that attempts to optimize stdarg functions on architectures
42 that need to save register arguments to stack on entry to stdarg functions.
43 If the function doesn't use any va_start macros, no registers need to
44 be saved. If va_start macros are used, the va_list variables don't escape
45 the function, it is only necessary to save registers that will be used
46 in va_arg macros. E.g. if va_arg is only used with integral types
47 in the function, floating point registers don't need to be saved, etc. */
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  auto_vec<edge, 10> stack;
  edge e;
  edge_iterator ei;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  /* If va_arg_bb is not dominated by va_start_bb, some path reaches it
     without passing through va_start at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Depth-first walk backwards from va_arg_bb over predecessor edges,
     stopping each branch of the search at va_start_bb.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution-count reasoning unsafe.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* Reaching va_arg_bb again on a path that avoided va_start_bb means
	 va_arg_bb can be executed more times than va_start_bb (a loop).  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees the walk cannot escape past the entry block
	 without first meeting va_start_bb.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  return ret;
}
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "offset of
     this SSA name from the counter's va_start value is not yet known".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;

  /* First pass: walk the SSA def chain backwards from RHS, accumulating
     the constant increments in RET, until either COUNTER itself is read
     or a previously cached SSA name is hit.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* A cached offset lets us stop early; adjust RET relative to the
	 current counter value.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast: follow the operand.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = op + CST: accumulate the bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[op + CST]: another form of pointer bump.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate in a read of COUNTER itself; anything
	 else means the bump is not a recognizable constant.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Same base object and same FIELD_DECL means the same counter
	     field.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain (now known to be well-formed)
     and record each SSA name's offset from the va_start value in the
     cache, capped at MAX_SIZE.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      /* The first pass validated every def stmt on this chain, so no
	 is_gimple_assign check is needed here.  */
      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
262 /* Called by walk_tree to look for references to va_list variables. */
264 static tree
265 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
266 void *data)
268 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
269 tree var = *tp;
271 if (TREE_CODE (var) == SSA_NAME)
273 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
274 return var;
276 else if (VAR_P (var))
278 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
279 return var;
282 return NULL_TREE;
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* compute_sizes < 0 means "not decided yet for this bb": decide once
     per basic block whether exact size tracking is safe.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* INCREMENT + 1 > 1 is true iff the bump is neither 0 nor the
     HOST_WIDE_INT_M1U "unknown" sentinel (which wraps to 0).  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Unknown bump, or a write in a bb that may execute multiple times:
     conservatively assume all registers of this class are needed.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* AP must be a field access on some object.  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be a plain SSA temporary that is not itself a tracked
     va_list variable.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  /* The base object must be one of the tracked local va_list decls.  */
  base = get_base_address (ap);
  if (!VAR_P (base)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Only the backend-declared GPR/FPR counter fields matter; other
     fields of the va_list struct are recognized but don't affect the
     sizes.  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked local va_list decls.  */
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an ordinary SSA temporary, not itself tracked as a
     va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Decide once per bb whether exact size tracking is safe here.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* HOST_WIDE_INT_M1U means the offset from va_start is unknown.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  /* AP must be one of the tracked local va_list decls.  */
  if (!VAR_P (ap)
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  /* Without safe size tracking in this bb, nothing can be concluded.  */
  if (si->compute_sizes <= 0)
    return false;

  /* INCREMENT + 1 <= 1 catches both a zero bump and the
     HOST_WIDE_INT_M1U "unknown" sentinel.  */
  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only statements whose RHS derives from a tracked escape temporary
     are interesting; everything else is ignored.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM_REF[ptr + CST] — the pointer bump form.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a derived pointer into anything but an SSA temporary means
     it may be visible outside this function.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Decide once per bb whether exact size tracking is safe here.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another temporary holding a va_list-derived value; keep
     tracking it.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A PHI whose result is not itself a tracked escape temporary but
	 which reads one lets the value flow somewhere we don't track —
	 treat that as an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  /* Every non-whitelisted use of a tracked temporary falls through
	     to the "escapes" tail below.  */
	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      /* The load reaches up to cached offset + MEM_REF
			 offset + access size bytes past va_start.  */
		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
656 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
658 static void
659 optimize_va_list_gpr_fpr_size (function *fun)
661 basic_block bb;
662 bool va_list_escapes = false;
663 bool va_list_simple_ptr;
664 struct stdarg_info si;
665 struct walk_stmt_info wi;
666 const char *funcname = NULL;
667 tree cfun_va_list;
669 fun->va_list_gpr_size = 0;
670 fun->va_list_fpr_size = 0;
671 memset (&si, 0, sizeof (si));
672 si.va_list_vars = BITMAP_ALLOC (NULL);
673 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
675 if (dump_file)
676 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
678 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
679 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
680 && (TREE_TYPE (cfun_va_list) == void_type_node
681 || TREE_TYPE (cfun_va_list) == char_type_node);
682 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
684 FOR_EACH_BB_FN (bb, fun)
686 gimple_stmt_iterator i;
688 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
690 gimple *stmt = gsi_stmt (i);
691 tree callee, ap;
693 if (!is_gimple_call (stmt))
694 continue;
696 callee = gimple_call_fndecl (stmt);
697 if (!callee
698 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
699 continue;
701 switch (DECL_FUNCTION_CODE (callee))
703 case BUILT_IN_VA_START:
704 break;
705 /* If old style builtins are used, don't optimize anything. */
706 case BUILT_IN_SAVEREGS:
707 case BUILT_IN_NEXT_ARG:
708 va_list_escapes = true;
709 continue;
710 default:
711 continue;
714 si.va_start_count++;
715 ap = gimple_call_arg (stmt, 0);
717 if (TREE_CODE (ap) != ADDR_EXPR)
719 va_list_escapes = true;
720 break;
722 ap = TREE_OPERAND (ap, 0);
723 if (TREE_CODE (ap) == ARRAY_REF)
725 if (! integer_zerop (TREE_OPERAND (ap, 1)))
727 va_list_escapes = true;
728 break;
730 ap = TREE_OPERAND (ap, 0);
732 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
733 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
734 || !VAR_P (ap))
736 va_list_escapes = true;
737 break;
740 if (is_global_var (ap))
742 va_list_escapes = true;
743 break;
746 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
748 /* VA_START_BB and VA_START_AP will be only used if there is just
749 one va_start in the function. */
750 si.va_start_bb = bb;
751 si.va_start_ap = ap;
754 if (va_list_escapes)
755 break;
758 /* If there were no va_start uses in the function, there is no need to
759 save anything. */
760 if (si.va_start_count == 0)
761 goto finish;
763 /* If some va_list arguments weren't local, we can't optimize. */
764 if (va_list_escapes)
765 goto finish;
767 /* For void * or char * va_list, something useful can be done only
768 if there is just one va_start. */
769 if (va_list_simple_ptr && si.va_start_count > 1)
771 va_list_escapes = true;
772 goto finish;
775 /* For struct * va_list, if the backend didn't tell us what the counter fields
776 are, there is nothing more we can do. */
777 if (!va_list_simple_ptr
778 && va_list_gpr_counter_field == NULL_TREE
779 && va_list_fpr_counter_field == NULL_TREE)
781 va_list_escapes = true;
782 goto finish;
785 /* For void * or char * va_list there is just one counter
786 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
787 if (va_list_simple_ptr)
788 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
790 calculate_dominance_info (CDI_DOMINATORS);
791 memset (&wi, 0, sizeof (wi));
792 wi.info = si.va_list_vars;
794 FOR_EACH_BB_FN (bb, fun)
796 si.compute_sizes = -1;
797 si.bb = bb;
799 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
800 them as assignments for the purpose of escape analysis. This is
801 not needed for non-simple va_list because virtual phis don't perform
802 any real data movement. Also, check PHI nodes for taking address of
803 the va_list vars. */
804 tree lhs, rhs;
805 use_operand_p uop;
806 ssa_op_iter soi;
808 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
809 gsi_next (&i))
811 gphi *phi = i.phi ();
812 lhs = PHI_RESULT (phi);
814 if (virtual_operand_p (lhs))
815 continue;
817 if (va_list_simple_ptr)
819 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
821 rhs = USE_FROM_PTR (uop);
822 if (va_list_ptr_read (&si, rhs, lhs))
823 continue;
824 else if (va_list_ptr_write (&si, lhs, rhs))
825 continue;
826 else
827 check_va_list_escapes (&si, lhs, rhs);
829 if (si.va_list_escapes)
831 if (dump_file && (dump_flags & TDF_DETAILS))
833 fputs ("va_list escapes in ", dump_file);
834 print_gimple_stmt (dump_file, phi, 0, dump_flags);
835 fputc ('\n', dump_file);
837 va_list_escapes = true;
842 for (unsigned j = 0; !va_list_escapes
843 && j < gimple_phi_num_args (phi); ++j)
844 if ((!va_list_simple_ptr
845 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
846 && walk_tree (gimple_phi_arg_def_ptr (phi, j),
847 find_va_list_reference, &wi, NULL))
849 if (dump_file && (dump_flags & TDF_DETAILS))
851 fputs ("va_list escapes in ", dump_file);
852 print_gimple_stmt (dump_file, phi, 0, dump_flags);
853 fputc ('\n', dump_file);
855 va_list_escapes = true;
859 for (gimple_stmt_iterator i = gsi_start_bb (bb);
860 !gsi_end_p (i) && !va_list_escapes;
861 gsi_next (&i))
863 gimple *stmt = gsi_stmt (i);
865 /* Don't look at __builtin_va_{start,end}, they are ok. */
866 if (is_gimple_call (stmt))
868 tree callee = gimple_call_fndecl (stmt);
870 if (callee
871 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
872 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
873 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
874 continue;
877 if (is_gimple_assign (stmt))
879 lhs = gimple_assign_lhs (stmt);
880 rhs = gimple_assign_rhs1 (stmt);
882 if (va_list_simple_ptr)
884 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
885 == GIMPLE_SINGLE_RHS)
887 /* Check for ap ={v} {}. */
888 if (TREE_CLOBBER_P (rhs))
889 continue;
891 /* Check for tem = ap. */
892 else if (va_list_ptr_read (&si, rhs, lhs))
893 continue;
895 /* Check for the last insn in:
896 tem1 = ap;
897 tem2 = tem1 + CST;
898 ap = tem2;
899 sequence. */
900 else if (va_list_ptr_write (&si, lhs, rhs))
901 continue;
904 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
905 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
906 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
907 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
908 == GIMPLE_SINGLE_RHS))
909 check_va_list_escapes (&si, lhs, rhs);
911 else
913 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
914 == GIMPLE_SINGLE_RHS)
916 /* Check for ap ={v} {}. */
917 if (TREE_CLOBBER_P (rhs))
918 continue;
920 /* Check for ap[0].field = temp. */
921 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
922 continue;
924 /* Check for temp = ap[0].field. */
925 else if (va_list_counter_struct_op (&si, rhs, lhs,
926 false))
927 continue;
930 /* Do any architecture specific checking. */
931 if (targetm.stdarg_optimize_hook
932 && targetm.stdarg_optimize_hook (&si, stmt))
933 continue;
936 else if (is_gimple_debug (stmt))
937 continue;
939 /* All other uses of va_list are either va_copy (that is not handled
940 in this optimization), taking address of va_list variable or
941 passing va_list to other functions (in that case va_list might
942 escape the function and therefore va_start needs to set it up
943 fully), or some unexpected use of va_list. None of these should
944 happen in a gimplified VA_ARG_EXPR. */
945 if (si.va_list_escapes
946 || walk_gimple_op (stmt, find_va_list_reference, &wi))
948 if (dump_file && (dump_flags & TDF_DETAILS))
950 fputs ("va_list escapes in ", dump_file);
951 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
952 fputc ('\n', dump_file);
954 va_list_escapes = true;
958 if (va_list_escapes)
959 break;
962 if (! va_list_escapes
963 && va_list_simple_ptr
964 && ! bitmap_empty_p (si.va_list_escape_vars)
965 && check_all_va_list_escapes (&si))
966 va_list_escapes = true;
968 finish:
969 if (va_list_escapes)
971 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
972 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
974 BITMAP_FREE (si.va_list_vars);
975 BITMAP_FREE (si.va_list_escape_vars);
976 free (si.offsets);
977 if (dump_file)
979 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
980 funcname, (int) va_list_escapes);
981 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
982 fputs ("all", dump_file);
983 else
984 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
985 fputs (" GPR units and ", dump_file);
986 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
987 fputs ("all", dump_file);
988 else
989 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
990 fputs (" FPR units.\n", dump_file);
/* Expand IFN_VA_ARGs in FUN.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	/* Arg 1 is a pointer to the requested type; arg 2's type gives
	   the (pointer) type of the &ap argument.  */
	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balanced out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    /* We replace call with a new expr.  This may require
	       corresponding bndret call fixup.  */
	    if (chkp_function_instrumented_p (fun->decl))
	      chkp_fixup_inlined_call (lhs, expr);

	    if (nargs == 4)
	      {
		/* We've transported the size of with WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inbetween.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  /* The CFG was restructured by gimple_find_sub_bbs; rebuild SSA and
     drop stale dominator info.  */
  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
1095 /* Expand IFN_VA_ARGs in FUN, if necessary. */
1097 static void
1098 expand_ifn_va_arg (function *fun)
1100 if ((fun->curr_properties & PROP_gimple_lva) == 0)
1101 expand_ifn_va_arg_1 (fun);
1103 if (flag_checking)
1105 basic_block bb;
1106 gimple_stmt_iterator i;
1107 FOR_EACH_BB_FN (bb, fun)
1108 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1109 gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
1113 namespace {
/* Pass descriptor for the stdarg pass: a GIMPLE pass requiring CFG and
   SSA, providing the lowered-va_arg property.  */
const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The stdarg pass: expands IFN_VA_ARG calls and, for stdarg functions,
   optimizes the prologue's register-save area sizes.  */
class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
	 also need to do that if fun->stdarg == 0, because a va_arg may also
	 occur in a function without varargs, f.i. if when passing a va_list to
	 another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
1149 unsigned int
1150 pass_stdarg::execute (function *fun)
1152 /* TODO: Postpone expand_ifn_va_arg till after
1153 optimize_va_list_gpr_fpr_size. */
1154 expand_ifn_va_arg (fun);
1156 if (flag_stdarg_opt
1157 /* This optimization is only for stdarg functions. */
1158 && fun->stdarg != 0)
1159 optimize_va_list_gpr_fpr_size (fun);
1161 return 0;
1164 } // anon namespace
/* Factory function for the stdarg pass, called by the pass manager.  */
gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}
1172 namespace {
/* Pass descriptor for the standalone va_arg-lowering pass (used when
   IFN_VA_ARG must be expanded before the stdarg pass runs).  */
const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass that only lowers IFN_VA_ARG; gated on the lowered-va_arg property
   not yet being set.  */
class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Run only if IFN_VA_ARG has not been expanded yet.  */
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg
/* Entry point of the lower_vaarg pass: just expand IFN_VA_ARG calls.  */
unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}
1211 } // anon namespace
/* Factory function for the lower_vaarg pass, called by the pass manager.  */
gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}