gcc/tree-tailcall.c
/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "common/common-target.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach the return x statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

	a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
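
/* As an illustrative sketch of case 3 above (the function here is for
   exposition only, not taken from the sources), a function such as

     int fact (int n)
     {
       if (n < 2)
	 return 1;
       return n * fact (n - 1);
     }

   falls into case 3 with a = 0 and m = n: the transformed loop executes
   m_acc *= n before each eliminated recursive call, and the final
   "return 1" is rewritten to return a_acc + 1 * m_acc, i.e. the
   accumulated product.  */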
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
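
/* For example (an illustrative sketch; the SSA names are hypothetical),
   given a block of the shape

     t_1 = foo (n_2);	<-- the call at GSI
     r_3 = x_4 + t_1;	<-- the stmt AT

   independent_of_stmt_p (x_4, AT, GSI) returns x_4 if x_4 is a default
   definition or is defined before the marked chain of blocks, and
   NULL_TREE if x_4 is defined between the call and its use, since its
   value is then not yet known at GSI.  */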
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt)
	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	return false;

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;
  /* For a unary operation, the operand must be the tracked return value
     itself; otherwise the operation does not apply to the call's result
     and treating it as such would be wrong.  */
  if (rhs_class == GIMPLE_UNARY_RHS
      && op0 == *ass_var)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;
  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	*m = build_real (TREE_TYPE (op0), dconstm1);
      else
	*m = build_int_cst (TREE_TYPE (op0), -1);

      *ass_var = dest;
      return true;
    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  /* Here dest = non_ass_var - *ass_var, i.e.
	     dest = -1 * *ass_var + non_ass_var, so the multiplicative
	     factor is -1 and the additive factor is non_ass_var itself,
	     not its negation.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
	    *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
	  else
	    *m = build_int_cst (TREE_TYPE (non_ass_var), -1);

	  *a = non_ass_var;
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
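
/* As a sketch of how the factors are accumulated (hypothetical SSA names),
   the body of the sum example from the head of this file is gimplified
   roughly as

     t_1 = sum (n_2 - 1);
     r_3 = n_2 + t_1;
     return r_3;

   Processing "r_3 = n_2 + t_1" with *ass_var == t_1 hits the PLUS_EXPR
   case, records the additive factor *a = n_2 and retargets *ass_var to
   r_3, which then matches the return value of the final return.  */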
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens
     in, e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion
		 statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }
  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }
  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
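
/* To illustrate the result (a sketch in terms of the sum example above):
   the call "sum (n - 1)" is recorded as a tailcall with
   tail_recursion = true, mult = NULL_TREE and add = n, which later makes
   tree_optimize_tail_calls_1 create only the additive accumulator
   a_acc.  */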
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
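
/* In terms of the file header comment, the code above implements the update

     a_acc += a * m_acc;
     m_acc *= m;

   (a sketch of the mapping; the integer_onep test merely avoids emitting
   a multiplication by one).  */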
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */

static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;

  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }
  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the
   loop, which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into phi nodes (there in fact does not have to be such
     ssa name available).  A solution would be to have an artificial
     use/kill for all virtual operands in EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
	mark_sym_for_renaming (var);
    }
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of basic block BB, which must have a single predecessor, with an
   initial value of INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }
  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that the first block has a single predecessor and no
	     existing (degenerate) PHI nodes; otherwise split the edge
	     from the entry block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = gimple_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_default_def (param, new_name);
		phi = create_phi_node (name, first);
		SSA_NAME_DEF_STMT (name) = phi;
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }
  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_recursion,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};
struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_calls,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};