/* Tail call optimization on trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-ssa.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze the tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example, the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc); when we
   reach a return x statement, we return a_acc + x * m_acc instead.  They
   are initialized to 0 and 1, respectively, so the semantics of the
   function are obviously preserved.  If we can prove that the value of
   an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
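/* Illustration only, not part of the pass: case 3 above with a = 0 and
   m = n covers plain factorial.  A function like

     int fact (int n)
     {
       if (n <= 1)
         return 1;
       return n * fact (n - 1);
     }

   is conceptually turned into

     int fact (int n)
     {
       int m_acc = 1;
       while (n > 1)
         m_acc *= n--;
       return m_acc * 1;
     }

   where the final return corresponds to case 1 (return m_acc * x + a_acc,
   with a_acc omitted because it provably stays 0).  */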
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from a chain of
   basic blocks that each have a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
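/* Illustration only: for a chain CALL_BB -> BB1 -> AT_BB, the code above
   first sets bb->aux for every block between the call and AT, then walks
   the SSA definition chain of EXPR.  A definition outside the marked
   chain (or before the call within CALL_BB itself) proves that EXPR
   already has its value at the call; a PHI node inside the chain is
   translated through the single marked predecessor edge instead.  */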
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factors for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt)
	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	return false;

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
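/* Illustration only; the SSA names below are made up.  For GIMPLE such as

     t_1 = foo (x_2);      the call; *ass_var starts out as t_1
     t_3 = t_1 * x_2;      MULT_EXPR: sets *m = x_2, *ass_var = t_3
     t_4 = t_3 + 5;        PLUS_EXPR: sets *a = 5, *ass_var = t_4
     return t_4;

   process_assignment is invoked once per assignment; the caller in
   find_tail_calls combines the per-statement factors into the overall
   mult and add recorded for the tailcall.  */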
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
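/* Illustration only; the SSA names below are made up.  For the sum example
   from the overview comment, the GIMPLE for "return n + sum (n - 1)" looks
   roughly like

     t_1 = n_2 - 1;
     t_3 = sum (t_1);
     t_4 = n_2 + t_3;
     return t_4;

   find_tail_calls scans backwards to the call to sum, then walks forward
   to the return, feeding the addition to process_assignment, and records
   a tailcall with tail_recursion = true, mult = NULL_TREE and
   add = n_2.  */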
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which applies the operation specified by
   CODE to ACC and OP1, storing the result in a new variable named after
   LABEL, and inserts the statement at the position specified by GSI.
   Returns the tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gimple stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign_with_ops (code, result, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1, and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var = copy_ssa_name (acc, NULL);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, var, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
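/* Illustration only: in terms of case 3 of the overview comment, the
   function above implements the accumulator update

     a_acc' = a_acc + a * m_acc
     m_acc' = m_acc * m

   The new values are computed at the site of the eliminated call and
   become the phi arguments on the back edge created by
   eliminate_tail_call.  */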
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
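/* Illustration only; the SSA names below are made up.  This is case 1 of
   the overview comment: a plain "return x" in the transformed function
   becomes roughly

     mul_tmp_5 = m_acc_3 * x_1;
     acc_tmp_6 = a_acc_2 + mul_tmp_5;
     return acc_tmp_6;

   with the multiplication or the addition omitted when the corresponding
   accumulator was never created.  */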
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator is inserted as a PHI node
   in basic block BB, which must have a single predecessor, with an initial
   value INIT converted to the current function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gimple phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* If the first block has more than one predecessor, or already
	     contains (degenerate) PHI nodes, split the edge from the entry
	     block so that we get a fresh block to attach the PHI nodes to.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      if (current_loops)
	loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  bool gate () { return gate_tail_calls (); }
  unsigned int execute () { return execute_tail_recursion (); }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}
namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tail_calls (); }
  unsigned int execute () { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}