* rw.po: Remove.
[official-gcc.git] / gcc / tree-tailcall.c
blob491ba385deba663c1565870c7d4b51394861cc6f
1 /* Tail call optimization on trees.
2 Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "diagnostic.h"
33 #include "except.h"
34 #include "tree-pass.h"
35 #include "flags.h"
36 #include "langhooks.h"
38 /* The file implements the tail recursion elimination. It is also used to
39 analyze the tail calls in general, passing the results to the rtl level
40 where they are used for sibcall optimization.
42 In addition to the standard tail recursion elimination, we handle the most
43 trivial cases of making the call tail recursive by creating accumulators.
44 For example the following function
46 int sum (int n)
48 if (n > 0)
49 return n + sum (n - 1);
50 else
51 return 0;
54 is transformed into
56 int sum (int n)
58 int acc = 0;
60 while (n > 0)
61 acc += n--;
63 return acc;
66 To do this, we maintain two accumulators (a_acc and m_acc) that indicate
67 when we reach the return x statement, we should return a_acc + x * m_acc
68 instead. They are initially initialized to 0 and 1, respectively,
69 so the semantics of the function is obviously preserved. If we are
70       guaranteed that the value of the accumulator never changes, we
71 omit the accumulator.
73    There are three ways in which the function may exit.  The first one is
74 handled in adjust_return_value, the other two in adjust_accumulator_values
75 (the second case is actually a special case of the third one and we
76 present it separately just for clarity):
78 1) Just return x, where x is not in any of the remaining special shapes.
79 We rewrite this to a gimple equivalent of return m_acc * x + a_acc.
81 2) return f (...), where f is the current function, is rewritten in a
82 classical tail-recursion elimination way, into assignment of arguments
83 and jump to the start of the function. Values of the accumulators
84 are unchanged.
86 3) return a + m * f(...), where a and m do not depend on call to f.
87 To preserve the semantics described before we want this to be rewritten
88 in such a way that we finally return
90 a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).
92 I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
93 eliminate the tail call to f. Special cases when the value is just
94 added or just multiplied are obtained by setting a = 0 or m = 1.
96 TODO -- it is possible to do similar tricks for other operations. */
98 /* A structure that describes the tailcall. */
100 struct tailcall
102 /* The block in that the call occur. */
103 basic_block call_block;
105 /* The iterator pointing to the call statement. */
106 block_stmt_iterator call_bsi;
108 /* True if it is a call to the current function. */
109 bool tail_recursion;
111 /* The return value of the caller is mult * f + add, where f is the return
112 value of the call. */
113 tree mult, add;
115 /* Next tailcall in the chain. */
116 struct tailcall *next;
119 /* The variables holding the value of multiplicative and additive
120 accumulator. */
121 static tree m_acc, a_acc;
123 static bool suitable_for_tail_opt_p (void);
124 static bool optimize_tail_call (struct tailcall *, bool);
125 static void eliminate_tail_call (struct tailcall *);
126 static void find_tail_calls (basic_block, struct tailcall **);
128 /* Returns false when the function is not suitable for tail call optimization
129 from some reason (e.g. if it takes variable number of arguments). */
131 static bool
132 suitable_for_tail_opt_p (void)
134 referenced_var_iterator rvi;
135 tree var;
137 if (current_function_stdarg)
138 return false;
140 /* No local variable nor structure field should be call-clobbered. We
141 ignore any kind of memory tag, as these are not real variables. */
143 FOR_EACH_REFERENCED_VAR (var, rvi)
146 if (!is_global_var (var)
147 && (!MTAG_P (var) || TREE_CODE (var) == STRUCT_FIELD_TAG)
148 && is_call_clobbered (var))
149 return false;
152 return true;
154 /* Returns false when the function is not suitable for tail call optimization
155 from some reason (e.g. if it takes variable number of arguments).
156 This test must pass in addition to suitable_for_tail_opt_p in order to make
157 tail call discovery happen. */
159 static bool
160 suitable_for_tail_call_opt_p (void)
162 tree param;
164 /* alloca (until we have stack slot life analysis) inhibits
165 sibling call optimizations, but not tail recursion. */
166 if (current_function_calls_alloca)
167 return false;
169 /* If we are using sjlj exceptions, we may need to add a call to
170 _Unwind_SjLj_Unregister at exit of the function. Which means
171 that we cannot do any sibcall transformations. */
172 if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
173 return false;
175 /* Any function that calls setjmp might have longjmp called from
176 any called function. ??? We really should represent this
177 properly in the CFG so that this needn't be special cased. */
178 if (current_function_calls_setjmp)
179 return false;
181 /* ??? It is OK if the argument of a function is taken in some cases,
182 but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
183 for (param = DECL_ARGUMENTS (current_function_decl);
184 param;
185 param = TREE_CHAIN (param))
186 if (TREE_ADDRESSABLE (param))
187 return false;
189 return true;
192 /* Checks whether the expression EXPR in stmt AT is independent of the
193 statement pointed to by BSI (in a sense that we already know EXPR's value
194 at BSI). We use the fact that we are only called from the chain of
195 basic blocks that have only single successor. Returns the expression
196 containing the value of EXPR at BSI. */
198 static tree
199 independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
201 basic_block bb, call_bb, at_bb;
202 edge e;
203 edge_iterator ei;
205 if (is_gimple_min_invariant (expr))
206 return expr;
208 if (TREE_CODE (expr) != SSA_NAME)
209 return NULL_TREE;
211 /* Mark the blocks in the chain leading to the end. */
212 at_bb = bb_for_stmt (at);
213 call_bb = bb_for_stmt (bsi_stmt (bsi));
214 for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
215 bb->aux = &bb->aux;
216 bb->aux = &bb->aux;
218 while (1)
220 at = SSA_NAME_DEF_STMT (expr);
221 bb = bb_for_stmt (at);
223 /* The default definition or defined before the chain. */
224 if (!bb || !bb->aux)
225 break;
227 if (bb == call_bb)
229 for (; !bsi_end_p (bsi); bsi_next (&bsi))
230 if (bsi_stmt (bsi) == at)
231 break;
233 if (!bsi_end_p (bsi))
234 expr = NULL_TREE;
235 break;
238 if (TREE_CODE (at) != PHI_NODE)
240 expr = NULL_TREE;
241 break;
244 FOR_EACH_EDGE (e, ei, bb->preds)
245 if (e->src->aux)
246 break;
247 gcc_assert (e);
249 expr = PHI_ARG_DEF_FROM_EDGE (at, e);
250 if (TREE_CODE (expr) != SSA_NAME)
252 /* The value is a constant. */
253 break;
257 /* Unmark the blocks. */
258 for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
259 bb->aux = NULL;
260 bb->aux = NULL;
262 return expr;
265 /* Simulates the effect of an assignment of ASS in STMT on the return value
266 of the tail recursive CALL passed in ASS_VAR. M and A are the
267 multiplicative and the additive factor for the real return value. */
269 static bool
270 process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
271 tree *a, tree *ass_var)
273 tree op0, op1, non_ass_var;
274 tree dest = TREE_OPERAND (ass, 0);
275 tree src = TREE_OPERAND (ass, 1);
276 enum tree_code code = TREE_CODE (src);
277 tree src_var = src;
279 /* See if this is a simple copy operation of an SSA name to the function
280 result. In that case we may have a simple tail call. Ignore type
281 conversions that can never produce extra code between the function
282 call and the function return. */
283 STRIP_NOPS (src_var);
284 if (TREE_CODE (src_var) == SSA_NAME)
286 if (src_var != *ass_var)
287 return false;
289 *ass_var = dest;
290 return true;
293 if (TREE_CODE_CLASS (code) != tcc_binary)
294 return false;
296 /* Accumulator optimizations will reverse the order of operations.
297 We can only do that for floating-point types if we're assuming
298 that addition and multiplication are associative. */
299 if (!flag_unsafe_math_optimizations)
300 if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
301 return false;
303 /* We only handle the code like
305 x = call ();
306 y = m * x;
307 z = y + a;
308 return z;
310 TODO -- Extend it for cases where the linear transformation of the output
311 is expressed in a more complicated way. */
313 op0 = TREE_OPERAND (src, 0);
314 op1 = TREE_OPERAND (src, 1);
316 if (op0 == *ass_var
317 && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
319 else if (op1 == *ass_var
320 && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
322 else
323 return false;
325 switch (code)
327 case PLUS_EXPR:
328 /* There should be no previous addition. TODO -- it should be fairly
329 straightforward to lift this restriction -- just allow storing
330 more complicated expressions in *A, and gimplify it in
331 adjust_accumulator_values. */
332 if (*a)
333 return false;
334 *a = non_ass_var;
335 *ass_var = dest;
336 return true;
338 case MULT_EXPR:
339 /* Similar remark applies here. Handling multiplication after addition
340 is just slightly more complicated -- we need to multiply both *A and
341 *M. */
342 if (*a || *m)
343 return false;
344 *m = non_ass_var;
345 *ass_var = dest;
346 return true;
348 /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR). */
350 default:
351 return false;
355 /* Propagate VAR through phis on edge E. */
357 static tree
358 propagate_through_phis (tree var, edge e)
360 basic_block dest = e->dest;
361 tree phi;
363 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
364 if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
365 return PHI_RESULT (phi);
367 return var;
370 /* Finds tailcalls falling into basic block BB. The list of found tailcalls is
371 added to the start of RET. */
373 static void
374 find_tail_calls (basic_block bb, struct tailcall **ret)
376 tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
377 block_stmt_iterator bsi, absi;
378 bool tail_recursion;
379 struct tailcall *nw;
380 edge e;
381 tree m, a;
382 basic_block abb;
383 stmt_ann_t ann;
385 if (!single_succ_p (bb))
386 return;
388 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
390 stmt = bsi_stmt (bsi);
392 /* Ignore labels. */
393 if (TREE_CODE (stmt) == LABEL_EXPR)
394 continue;
396 /* Check for a call. */
397 if (TREE_CODE (stmt) == MODIFY_EXPR)
399 ass_var = TREE_OPERAND (stmt, 0);
400 call = TREE_OPERAND (stmt, 1);
401 if (TREE_CODE (call) == WITH_SIZE_EXPR)
402 call = TREE_OPERAND (call, 0);
404 else
406 ass_var = NULL_TREE;
407 call = stmt;
410 if (TREE_CODE (call) == CALL_EXPR)
411 break;
413 /* If the statement has virtual or volatile operands, fail. */
414 ann = stmt_ann (stmt);
415 if (!ZERO_SSA_OPERANDS (stmt, (SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS))
416 || ann->has_volatile_ops)
417 return;
420 if (bsi_end_p (bsi))
422 edge_iterator ei;
423 /* Recurse to the predecessors. */
424 FOR_EACH_EDGE (e, ei, bb->preds)
425 find_tail_calls (e->src, ret);
427 return;
430 /* We found the call, check whether it is suitable. */
431 tail_recursion = false;
432 func = get_callee_fndecl (call);
433 if (func == current_function_decl)
435 for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
436 param && args;
437 param = TREE_CHAIN (param), args = TREE_CHAIN (args))
439 tree arg = TREE_VALUE (args);
440 if (param != arg)
442 /* Make sure there are no problems with copying. The parameter
443 have a copyable type and the two arguments must have reasonably
444 equivalent types. The latter requirement could be relaxed if
445 we emitted a suitable type conversion statement. */
446 if (!is_gimple_reg_type (TREE_TYPE (param))
447 || !lang_hooks.types_compatible_p (TREE_TYPE (param),
448 TREE_TYPE (arg)))
449 break;
451 /* The parameter should be a real operand, so that phi node
452 created for it at the start of the function has the meaning
453 of copying the value. This test implies is_gimple_reg_type
454 from the previous condition, however this one could be
455 relaxed by being more careful with copying the new value
456 of the parameter (emitting appropriate MODIFY_EXPR and
457 updating the virtual operands). */
458 if (!is_gimple_reg (param))
459 break;
462 if (!args && !param)
463 tail_recursion = true;
466 /* Now check the statements after the call. None of them has virtual
467 operands, so they may only depend on the call through its return
468 value. The return value should also be dependent on each of them,
469 since we are running after dce. */
470 m = NULL_TREE;
471 a = NULL_TREE;
473 abb = bb;
474 absi = bsi;
475 while (1)
477 bsi_next (&absi);
479 while (bsi_end_p (absi))
481 ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
482 abb = single_succ (abb);
483 absi = bsi_start (abb);
486 stmt = bsi_stmt (absi);
488 if (TREE_CODE (stmt) == LABEL_EXPR)
489 continue;
491 if (TREE_CODE (stmt) == RETURN_EXPR)
492 break;
494 if (TREE_CODE (stmt) != MODIFY_EXPR)
495 return;
497 if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
498 return;
501 /* See if this is a tail call we can handle. */
502 ret_var = TREE_OPERAND (stmt, 0);
503 if (ret_var
504 && TREE_CODE (ret_var) == MODIFY_EXPR)
506 tree ret_op = TREE_OPERAND (ret_var, 1);
507 STRIP_NOPS (ret_op);
508 if (!tail_recursion
509 && TREE_CODE (ret_op) != SSA_NAME)
510 return;
512 if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
513 return;
514 ret_var = TREE_OPERAND (ret_var, 0);
517 /* We may proceed if there either is no return value, or the return value
518 is identical to the call's return. */
519 if (ret_var
520 && (ret_var != ass_var))
521 return;
523 /* If this is not a tail recursive call, we cannot handle addends or
524 multiplicands. */
525 if (!tail_recursion && (m || a))
526 return;
528 nw = XNEW (struct tailcall);
530 nw->call_block = bb;
531 nw->call_bsi = bsi;
533 nw->tail_recursion = tail_recursion;
535 nw->mult = m;
536 nw->add = a;
538 nw->next = *ret;
539 *ret = nw;
542 /* Adjust the accumulator values according to A and M after BSI, and update
543 the phi nodes on edge BACK. */
545 static void
546 adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
548 tree stmt, var, phi, tmp;
549 tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
550 tree a_acc_arg = a_acc, m_acc_arg = m_acc;
552 if (a)
554 if (m_acc)
556 if (integer_onep (a))
557 var = m_acc;
558 else
560 stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
561 build2 (MULT_EXPR, ret_type, m_acc, a));
563 tmp = create_tmp_var (ret_type, "acc_tmp");
564 add_referenced_var (tmp);
566 var = make_ssa_name (tmp, stmt);
567 TREE_OPERAND (stmt, 0) = var;
568 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
571 else
572 var = a;
574 stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
575 build2 (PLUS_EXPR, ret_type, a_acc, var));
576 var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
577 TREE_OPERAND (stmt, 0) = var;
578 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
579 a_acc_arg = var;
582 if (m)
584 stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
585 build2 (MULT_EXPR, ret_type, m_acc, m));
586 var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
587 TREE_OPERAND (stmt, 0) = var;
588 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
589 m_acc_arg = var;
592 if (a_acc)
594 for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
595 if (PHI_RESULT (phi) == a_acc)
596 break;
598 add_phi_arg (phi, a_acc_arg, back);
601 if (m_acc)
603 for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
604 if (PHI_RESULT (phi) == m_acc)
605 break;
607 add_phi_arg (phi, m_acc_arg, back);
611 /* Adjust value of the return at the end of BB according to M and A
612 accumulators. */
614 static void
615 adjust_return_value (basic_block bb, tree m, tree a)
617 tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
618 tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
619 block_stmt_iterator bsi = bsi_last (bb);
621 gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);
623 ret_var = TREE_OPERAND (ret_stmt, 0);
624 if (!ret_var)
625 return;
627 if (TREE_CODE (ret_var) == MODIFY_EXPR)
629 ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
630 bsi_replace (&bsi, ret_var, true);
631 SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
632 ret_var = TREE_OPERAND (ret_var, 0);
633 ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
634 bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
637 if (m)
639 stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
640 build2 (MULT_EXPR, ret_type, m_acc, ret_var));
642 tmp = create_tmp_var (ret_type, "acc_tmp");
643 add_referenced_var (tmp);
645 var = make_ssa_name (tmp, stmt);
646 TREE_OPERAND (stmt, 0) = var;
647 bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
649 else
650 var = ret_var;
652 if (a)
654 stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
655 build2 (PLUS_EXPR, ret_type, a_acc, var));
657 tmp = create_tmp_var (ret_type, "acc_tmp");
658 add_referenced_var (tmp);
660 var = make_ssa_name (tmp, stmt);
661 TREE_OPERAND (stmt, 0) = var;
662 bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
665 TREE_OPERAND (ret_stmt, 0) = var;
666 update_stmt (ret_stmt);
669 /* Subtract COUNT and FREQUENCY from the basic block and it's
670 outgoing edge. */
671 static void
672 decrease_profile (basic_block bb, gcov_type count, int frequency)
674 edge e;
675 bb->count -= count;
676 if (bb->count < 0)
677 bb->count = 0;
678 bb->frequency -= frequency;
679 if (bb->frequency < 0)
680 bb->frequency = 0;
681 if (!single_succ_p (bb))
683 gcc_assert (!EDGE_COUNT (bb->succs));
684 return;
686 e = single_succ_edge (bb);
687 e->count -= count;
688 if (e->count < 0)
689 e->count = 0;
692 /* Returns true if argument PARAM of the tail recursive call needs to be copied
693 when the call is eliminated. */
695 static bool
696 arg_needs_copy_p (tree param)
698 tree def;
700 if (!is_gimple_reg (param) || !var_ann (param))
701 return false;
703 /* Parameters that are only defined but never used need not be copied. */
704 def = default_def (param);
705 if (!def)
706 return false;
708 return true;
711 /* Eliminates tail call described by T. TMP_VARS is a list of
712 temporary variables used to copy the function arguments. */
714 static void
715 eliminate_tail_call (struct tailcall *t)
717 tree param, stmt, args, rslt, call;
718 basic_block bb, first;
719 edge e;
720 tree phi;
721 block_stmt_iterator bsi;
722 tree orig_stmt;
724 stmt = orig_stmt = bsi_stmt (t->call_bsi);
725 bb = t->call_block;
727 if (dump_file && (dump_flags & TDF_DETAILS))
729 fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
730 bb->index);
731 print_generic_stmt (dump_file, stmt, TDF_SLIM);
732 fprintf (dump_file, "\n");
735 if (TREE_CODE (stmt) == MODIFY_EXPR)
736 stmt = TREE_OPERAND (stmt, 1);
738 first = single_succ (ENTRY_BLOCK_PTR);
740 /* Remove the code after call_bsi that will become unreachable. The
741 possibly unreachable code in other blocks is removed later in
742 cfg cleanup. */
743 bsi = t->call_bsi;
744 bsi_next (&bsi);
745 while (!bsi_end_p (bsi))
747 tree t = bsi_stmt (bsi);
748 /* Do not remove the return statement, so that redirect_edge_and_branch
749 sees how the block ends. */
750 if (TREE_CODE (t) == RETURN_EXPR)
751 break;
753 bsi_remove (&bsi, true);
754 release_defs (t);
757 /* Number of executions of function has reduced by the tailcall. */
758 e = single_succ_edge (t->call_block);
759 decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
760 decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
761 if (e->dest != EXIT_BLOCK_PTR)
762 decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
764 /* Replace the call by a jump to the start of function. */
765 e = redirect_edge_and_branch (single_succ_edge (t->call_block), first);
766 gcc_assert (e);
767 PENDING_STMT (e) = NULL_TREE;
769 /* Add phi node entries for arguments. The ordering of the phi nodes should
770 be the same as the ordering of the arguments. */
771 for (param = DECL_ARGUMENTS (current_function_decl),
772 args = TREE_OPERAND (stmt, 1),
773 phi = phi_nodes (first);
774 param;
775 param = TREE_CHAIN (param),
776 args = TREE_CHAIN (args))
778 if (!arg_needs_copy_p (param))
779 continue;
780 gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
782 add_phi_arg (phi, TREE_VALUE (args), e);
783 phi = PHI_CHAIN (phi);
786 /* Update the values of accumulators. */
787 adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);
789 call = bsi_stmt (t->call_bsi);
790 if (TREE_CODE (call) == MODIFY_EXPR)
792 rslt = TREE_OPERAND (call, 0);
794 /* Result of the call will no longer be defined. So adjust the
795 SSA_NAME_DEF_STMT accordingly. */
796 SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
799 bsi_remove (&t->call_bsi, true);
800 release_defs (call);
803 /* Add phi nodes for the virtual operands defined in the function to the
804 header of the loop created by tail recursion elimination.
806 Originally, we used to add phi nodes only for call clobbered variables,
807 as the value of the non-call clobbered ones obviously cannot be used
808 or changed within the recursive call. However, the local variables
809 from multiple calls now share the same location, so the virtual ssa form
810 requires us to say that the location dies on further iterations of the loop,
811 which requires adding phi nodes.
813 static void
814 add_virtual_phis (void)
816 referenced_var_iterator rvi;
817 tree var;
819 /* The problematic part is that there is no way how to know what
820 to put into phi nodes (there in fact does not have to be such
821 ssa name available). A solution would be to have an artificial
822 use/kill for all virtual operands in EXIT node. Unless we have
823 this, we cannot do much better than to rebuild the ssa form for
824 possibly affected virtual ssa names from scratch. */
826 FOR_EACH_REFERENCED_VAR (var, rvi)
828 if (!is_gimple_reg (var) && default_def (var) != NULL_TREE)
829 mark_sym_for_renaming (var);
832 update_ssa (TODO_update_ssa_only_virtuals);
835 /* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also
836 mark the tailcalls for the sibcall optimization. */
838 static bool
839 optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
841 if (t->tail_recursion)
843 eliminate_tail_call (t);
844 return true;
847 if (opt_tailcalls)
849 tree stmt = bsi_stmt (t->call_bsi);
851 stmt = get_call_expr_in (stmt);
852 CALL_EXPR_TAILCALL (stmt) = 1;
853 if (dump_file && (dump_flags & TDF_DETAILS))
855 fprintf (dump_file, "Found tail call ");
856 print_generic_expr (dump_file, stmt, dump_flags);
857 fprintf (dump_file, " in bb %i\n", t->call_block->index);
861 return false;
864 /* Optimizes tail calls in the function, turning the tail recursion
865 into iteration. */
867 static void
868 tree_optimize_tail_calls_1 (bool opt_tailcalls)
870 edge e;
871 bool phis_constructed = false;
872 struct tailcall *tailcalls = NULL, *act, *next;
873 bool changed = false;
874 basic_block first = single_succ (ENTRY_BLOCK_PTR);
875 tree stmt, param, ret_type, tmp, phi;
876 edge_iterator ei;
878 if (!suitable_for_tail_opt_p ())
879 return;
880 if (opt_tailcalls)
881 opt_tailcalls = suitable_for_tail_call_opt_p ();
883 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
885 /* Only traverse the normal exits, i.e. those that end with return
886 statement. */
887 stmt = last_stmt (e->src);
889 if (stmt
890 && TREE_CODE (stmt) == RETURN_EXPR)
891 find_tail_calls (e->src, &tailcalls);
894 /* Construct the phi nodes and accumulators if necessary. */
895 a_acc = m_acc = NULL_TREE;
896 for (act = tailcalls; act; act = act->next)
898 if (!act->tail_recursion)
899 continue;
901 if (!phis_constructed)
903 /* Ensure that there is only one predecessor of the block. */
904 if (!single_pred_p (first))
905 first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
907 /* Copy the args if needed. */
908 for (param = DECL_ARGUMENTS (current_function_decl);
909 param;
910 param = TREE_CHAIN (param))
911 if (arg_needs_copy_p (param))
913 tree name = default_def (param);
914 tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
915 tree phi;
917 set_default_def (param, new_name);
918 phi = create_phi_node (name, first);
919 SSA_NAME_DEF_STMT (name) = phi;
920 add_phi_arg (phi, new_name, single_pred_edge (first));
922 phis_constructed = true;
925 if (act->add && !a_acc)
927 ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
929 tmp = create_tmp_var (ret_type, "add_acc");
930 add_referenced_var (tmp);
932 phi = create_phi_node (tmp, first);
933 add_phi_arg (phi,
934 /* RET_TYPE can be a float when -ffast-maths is
935 enabled. */
936 fold_convert (ret_type, integer_zero_node),
937 single_pred_edge (first));
938 a_acc = PHI_RESULT (phi);
941 if (act->mult && !m_acc)
943 ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
945 tmp = create_tmp_var (ret_type, "mult_acc");
946 add_referenced_var (tmp);
948 phi = create_phi_node (tmp, first);
949 add_phi_arg (phi,
950 /* RET_TYPE can be a float when -ffast-maths is
951 enabled. */
952 fold_convert (ret_type, integer_one_node),
953 single_pred_edge (first));
954 m_acc = PHI_RESULT (phi);
959 if (phis_constructed)
961 /* Reverse the order of the phi nodes, so that it matches the order
962 of operands of the function, as assumed by eliminate_tail_call. */
963 set_phi_nodes (first, phi_reverse (phi_nodes (first)));
966 for (; tailcalls; tailcalls = next)
968 next = tailcalls->next;
969 changed |= optimize_tail_call (tailcalls, opt_tailcalls);
970 free (tailcalls);
973 if (a_acc || m_acc)
975 /* Modify the remaining return statements. */
976 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
978 stmt = last_stmt (e->src);
980 if (stmt
981 && TREE_CODE (stmt) == RETURN_EXPR)
982 adjust_return_value (e->src, m_acc, a_acc);
986 if (changed)
988 free_dominance_info (CDI_DOMINATORS);
989 cleanup_tree_cfg ();
992 if (phis_constructed)
993 add_virtual_phis ();
996 static unsigned int
997 execute_tail_recursion (void)
999 tree_optimize_tail_calls_1 (false);
1000 return 0;
1003 static bool
1004 gate_tail_calls (void)
1006 return flag_optimize_sibling_calls != 0;
1009 static unsigned int
1010 execute_tail_calls (void)
1012 tree_optimize_tail_calls_1 (true);
1013 return 0;
1016 struct tree_opt_pass pass_tail_recursion =
1018 "tailr", /* name */
1019 gate_tail_calls, /* gate */
1020 execute_tail_recursion, /* execute */
1021 NULL, /* sub */
1022 NULL, /* next */
1023 0, /* static_pass_number */
1024 0, /* tv_id */
1025 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1026 0, /* properties_provided */
1027 0, /* properties_destroyed */
1028 0, /* todo_flags_start */
1029 TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
1030 0 /* letter */
1033 struct tree_opt_pass pass_tail_calls =
1035 "tailc", /* name */
1036 gate_tail_calls, /* gate */
1037 execute_tail_calls, /* execute */
1038 NULL, /* sub */
1039 NULL, /* next */
1040 0, /* static_pass_number */
1041 0, /* tv_id */
1042 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1043 0, /* properties_provided */
1044 0, /* properties_destroyed */
1045 0, /* todo_flags_start */
1046 TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
1047 0 /* letter */