/* Tail call optimization on trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
/* The file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) indicating that
   when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the semantics
   of the function are obviously preserved.  If we are guaranteed that the
   value of an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc
	= (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
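
/* As a concrete illustration of case 3 (a hypothetical example, not taken
   from the sources): in

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   every eliminated recursive call has a = 0 and m = n, so each iteration
   leaves a_acc at 0 and multiplies m_acc by n.  The "return 1" exit is then
   rewritten per case 1 into returning m_acc * 1 + a_acc, which yields
   exactly the iterative factorial.  */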
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of the multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
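
/* For example (schematic GIMPLE, invented names): given

     tmp_1 = foo (n_2);
     sum_3 = tmp_1 + n_2;

   calling independent_of_stmt_p on n_2, with AT being the addition and GSI
   pointing at the call, returns n_2 itself, since n_2 is defined before the
   marked chain of blocks.  Calling it on tmp_1 returns NULL_TREE, because
   tmp_1 is defined by the call statement itself, i.e. its value is not yet
   known at GSI.  */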
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gassign *stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return false;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
		  > TYPE_PRECISION (TREE_TYPE (dest))))
	    return false;
	}

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
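
/* Note the asymmetry in the MINUS_EXPR case above: for "dest = x - ret" we
   record m = -1 together with a = -x rather than a = x.  The caller
   (find_tail_calls) folds a statement's effect in as a = (a + tmp_a) * tmp_m,
   so the pre-negated addend comes out as (a - x) * -1 = -a + x, which is
   exactly the effect of subtracting the running value from x.  */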
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = as_a <gcall *> (stmt);
	  ass_var = gimple_call_lhs (call);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (as_a <gassign *> (stmt), gsi, &tmp_m,
				&tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
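
/* As an example of how M and A accumulate over several statements (invented
   GIMPLE; any real sequence will differ):

     ret_1 = foo (...);
     tmp_2 = ret_1 + 7;
     tmp_3 = tmp_2 * 3;
     return tmp_3;

   The addition sets a = 7, the multiplication sets m = 3 and rescales the
   addend to a = 21, so the recorded tailcall stands for
   return 3 * foo (...) + 21.  */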
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which applies the operation specified by
   CODE to ACC and OP1, assigning the result to a new variable with name
   LABEL, and inserts the statement at the position specified by GSI.
   Returns the tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1, and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
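
/* In the notation of the header comment, for an eliminated call with factors
   M and A this emits the equivalent of

     a_acc_arg = a_acc + a * m_acc;
     m_acc_arg = m_acc * m;

   (skipping the multiplication when A is 1 or when there is no m_acc) and
   then feeds the updated values into the accumulator phis along the back
   edge BACK.  */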
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function is reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
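
/* Schematically (hypothetical GIMPLE), a tail-recursive block ending in

     arg_1 = n_2 - 1;
     ret_3 = foo (arg_1);
     return ret_3;

   has its successor edge redirected from the exit block to the block after
   the entry block, arg_1 added as the phi argument of the corresponding
   parameter along the new edge, the accumulators updated, and finally the
   call itself deleted.  */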
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with single predecessor with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
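
/* E.g. for the multiplicative accumulator this creates, schematically,

     mult_acc_1 = PHI <1 (entry)>

   where only the entry argument is added here; the back edge argument is
   filled in later by adjust_accumulator_values via add_successor_phi_arg.  */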
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that the block has only one predecessor and no existing
	     degenerate PHI nodes; otherwise split the edge leaving the
	     entry block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gphi *phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
    {
      return tree_optimize_tail_calls_1 (false);
    }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}

namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}