gcc/tree-tailcall.c
/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) so that
   when we reach a return x statement, we return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are trivially preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

        a_acc + (a + m * f (...)) * m_acc
          = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  The special cases in which the value
      is just added or just multiplied are obtained by setting a = 0 or
      m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
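
/* For example (an illustrative sketch, analogous to the sum example
   above), with a multiplicative accumulator a function such as

     int fact (int n)
     {
       if (n <= 1)
         return 1;
       return n * fact (n - 1);
     }

   matches case 3 with m = n and a = 0 and is turned into the equivalent of

     int fact (int n)
     {
       int m_acc = 1;

       while (n > 1)
         m_acc *= n--;

       return m_acc;
     }  */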
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};
/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  referenced_var_iterator rvi;
  tree var;

  if (cfun->stdarg)
    return false;

  /* No local variable nor structure field should escape to callees.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_global_var (var)
          /* ??? We do not have a suitable predicate for escaping to
             callees.  With IPA-PTA the following might be incorrect.
             We want to catch

               foo {
                 int i;
                 bar (&i);
                 foo ();
               }

             where bar might store &i somewhere and in the next
             recursion should not be able to tell if it got the
             same (with tail-recursion applied) or a different
             address.  */
          && is_call_clobbered (var))
        return false;
    }

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument of a function is taken in
     some cases, but not in all cases.  See PR15387 and PR19616.
     Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  if (rhs_class != GIMPLE_BINARY_RHS)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

       x = call ();
       y = m * x;
       z = y + a;
       return z;

     TODO -- Extend it for cases where the linear transformation of the
     output is expressed in a more complicated way.  */

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
         POINTER_PLUS_EXPR).  */

    default:
      return false;
    }
}
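
/* As an illustration of the shape handled above, for

     x = f ();
     y = 3 * x;
     z = y + 1;
     return z;

   process_assignment is called once for each of the two assignments
   following the call: the first records *m = 3, the second *a = 1, with
   *ass_var tracking the call result from x through z.  The caller can then
   treat the return value as 3 * f () + 1.  */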
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels and debug statements.  */
      if (gimple_code (stmt) == GIMPLE_LABEL || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg, var;
      referenced_var_iterator rvi;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = TREE_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could
                 be relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;

      /* Make sure the tail invocation of this function does not refer
         to local variables.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
        {
          if (!is_global_var (var)
              && ref_maybe_used_by_stmt_p (call, var))
            return;
        }
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          if (a)
            a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          if (m)
            m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
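
/* For instance, in

     int f (int n)
     {
       if (n == 0)
         return 0;
       return n + f (n - 1);
     }

   the recursive call is recorded with tail_recursion set, mult NULL
   (i.e. 1) and add equal to n.  */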
/* Helper to insert PHI_ARG into the phi of VAR in the destination of
   edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_var (ret_type, label);
  gimple stmt;
  tree result;

  if (TREE_CODE (ret_type) == COMPLEX_TYPE
      || TREE_CODE (ret_type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;
  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
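
/* In other words, for a tail recursion of the shape return a + m * f (...),
   the code above emits the equivalent of

     a_acc = a_acc + a * m_acc;
     m_acc = m_acc * m;

   matching case 3 of the overview comment at the top of the file.  */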
/* Adjust the value of the return statement at the end of BB according to
   the M and A accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be
   copied when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has decreased by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes
     should be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = TREE_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* The result of the call will no longer be defined, so adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the
   loop, which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into the phi nodes (in fact, a suitable ssa name need not
     even be available).  A solution would be to have an artificial
     use/kill for all virtual operands in the EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_var (ret_type, label);
  gimple phi;

  if (TREE_CODE (ret_type) == COMPLEX_TYPE
      || TREE_CODE (ret_type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;
  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
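
/* The caller below passes integer_zero_node as INIT for the additive
   accumulator and integer_one_node for the multiplicative one (the identity
   elements of the respective operations), so an accumulator that is never
   updated leaves the return value unchanged.  */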
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that the first block has a single predecessor and no
             existing (degenerate) PHI nodes; otherwise create a fresh
             block by splitting the entry edge.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = TREE_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};
struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};