/* Tail call optimization on trees.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit that accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
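
/* A further illustrative sketch of case 3 (hypothetical source, not code
   this pass manipulates): with a = 0 and m = n the transformation covers
   the factorial

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   which is turned into the equivalent of

   int fact (int n)
   {
     int m_acc = 1;

     while (n >= 2)
       m_acc *= n--;

     return m_acc * 1;
   }

   using only the multiplicative accumulator.  */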
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return false;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
		  > TYPE_PRECISION (TREE_TYPE (dest))))
	    return false;
	}

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;
    case MINUS_EXPR:
      if (*ass_var == op0)
	/* dest = f (...) - x, i.e. m = 1 and a = -x.  */
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  /* dest = x - f (...) = (-1) * f (...) + x, i.e. m = -1
	     and a = x.  */
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = non_ass_var;
	}

      *ass_var = dest;
      return true;
      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
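
/* An illustrative sketch of the M and A bookkeeping (the statement names
   t1..t3 below are hypothetical, not taken from this file): for

     t1 = f (n - 1);    // ass_var = t1
     t2 = t1 * 3;       // process_assignment sets m = 3, ass_var = t2
     t3 = t2 + 7;       // process_assignment sets a = 7, ass_var = t3
     return t3;

   find_tail_calls accumulates m = 3 and a = 7, i.e. the return value has
   the shape a + m * f (...) handled by case 3 of the header comment.  */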
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition; however, this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }
  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if either there is no return value, or the return value
     is identical to the call's return value.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1 into a new variable named LABEL, and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gimple stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign_with_ops (code, result, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1, and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var = copy_ssa_name (acc, NULL);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, var, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
/* Adjust the value of the return statement at the end of BB according to
   the M and A accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has decreased by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* The result of the call will no longer be defined, so adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gimple phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that the block has a single predecessor and no existing
	     (degenerate) PHI nodes; if not, split the edge from the entry
	     block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
  {
    return tree_optimize_tail_calls_1 (false);
  }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}
namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}