/* Tail call optimization on trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example, the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) such that when
   we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in the
      classical tail-recursion elimination way, into an assignment of the
      arguments and a jump to the start of the function.  Values of the
      accumulators are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
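
/* For illustration only -- the fragment below is not part of this pass and
   the function name "fact" is hypothetical.  It sketches how case 3 above,
   with a = 0 and m = n, covers the usual factorial shape:

     int fact (int n)
     {
       if (n < 2)
	 return 1;
       return n * fact (n - 1);   -- return a + m * fact (...), a = 0, m = n
     }

   which, using the accumulators described above, is roughly equivalent to:

     int fact (int n)
     {
       int m_acc = 1;             -- the additive accumulator a_acc stays 0

       while (n >= 2)
	 {
	   m_acc = m_acc * n;     -- m_acc is multiplied by m
	   n = n - 1;
	 }

       return m_acc * 1;          -- return a_acc + x * m_acc with x == 1
     }  */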
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The block in which the call occurs.  */
  basic_block call_block;

  /* The iterator pointing to the call statement.  */
  block_stmt_iterator call_bsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  int i;

  if (current_function_stdarg)
    return false;

  /* No local variable should be call-clobbered.  We ignore any kind
     of memory tag, as these are not real variables.  */
  for (i = 0; i < (int) VARRAY_ACTIVE_SIZE (referenced_vars); i++)
    {
      tree var = VARRAY_TREE (referenced_vars, i);

      if (!(TREE_STATIC (var) || DECL_EXTERNAL (var))
	  && var_ann (var)->mem_tag_kind == NOT_A_TAG
	  && is_call_clobbered (var))
	return false;
    }

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (current_function_calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (current_function_calls_setjmp)
    return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by BSI (in the sense that we already know EXPR's value
   at BSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at BSI.  */

static tree
independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = bb_for_stmt (at);
  call_bb = bb_for_stmt (bsi_stmt (bsi));
  for (bb = call_bb; bb != at_bb; bb = EDGE_SUCC (bb, 0)->dest)
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = bb_for_stmt (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !bsi_end_p (bsi); bsi_next (&bsi))
	    if (bsi_stmt (bsi) == at)
	      break;

	  if (!bsi_end_p (bsi))
	    expr = NULL_TREE;
	  break;
	}

      if (TREE_CODE (at) != PHI_NODE)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->src->aux)
	    break;
	}
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = EDGE_SUCC (bb, 0)->dest)
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment of ASS in STMT on the return value
   of the tail recursive CALL passed in ASS_VAR.  M and A are the
   multiplicative and additive factors for the real return value.  */

static bool
process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = TREE_OPERAND (ass, 0);
  tree src = TREE_OPERAND (ass, 1);
  enum tree_code code = TREE_CODE (src);
  tree src_var = src;

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  STRIP_NOPS (src_var);
  if (TREE_CODE (src_var) == SSA_NAME)
    {
      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_unsafe_math_optimizations)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

     x = call ();
     y = m * x;
     z = y + a;
     return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      /* There should be no previous addition.  TODO -- it should be fairly
	 straightforward to lift this restriction -- just allow storing
	 more complicated expressions in *A, and gimplify it in
	 adjust_accumulator_values.  */
      if (*a)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      /* Similar remark applies here.  Handling multiplication after addition
	 is just slightly more complicated -- we need to multiply both *A and
	 *M.  */
      if (*a || *m)
	return false;
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR).  */

    default:
      return false;
    }
}
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
      return PHI_RESULT (phi);

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
  block_stmt_iterator bsi, absi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  stmt_ann_t ann;

  if (EDGE_COUNT (bb->succs) > 1)
    return;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      stmt = bsi_stmt (bsi);

      /* Ignore labels.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      get_stmt_operands (stmt);

      /* Check for a call.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
	{
	  ass_var = TREE_OPERAND (stmt, 0);
	  call = TREE_OPERAND (stmt, 1);
	  if (TREE_CODE (call) == WITH_SIZE_EXPR)
	    call = TREE_OPERAND (call, 0);
	}
      else
	{
	  ass_var = NULL_TREE;
	  call = stmt;
	}

      if (TREE_CODE (call) == CALL_EXPR)
	break;

      /* If the statement has virtual or volatile operands, fail.  */
      ann = stmt_ann (stmt);
      if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann))
	  || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann))
	  || NUM_VUSES (VUSE_OPS (ann))
	  || ann->has_volatile_ops)
	return;
    }

  if (bsi_end_p (bsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = get_callee_fndecl (call);
  if (func == current_function_decl)
    {
      for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
	   param && args;
	   param = TREE_CHAIN (param), args = TREE_CHAIN (args))
	{
	  tree arg = TREE_VALUE (args);
	  if (param != arg
	      /* Make sure there are no problems with copying.  Note we must
		 have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      && (!is_gimple_reg_type (TREE_TYPE (param))
		  || !lang_hooks.types_compatible_p (TREE_TYPE (param),
						     TREE_TYPE (arg))))
	    break;
	}
      if (!args && !param)
	tail_recursion = true;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  absi = bsi;
  while (1)
    {
      bsi_next (&absi);

      while (bsi_end_p (absi))
	{
	  ass_var = propagate_through_phis (ass_var, EDGE_SUCC (abb, 0));
	  abb = EDGE_SUCC (abb, 0)->dest;
	  absi = bsi_start (abb);
	}

      stmt = bsi_stmt (absi);

      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      if (TREE_CODE (stmt) == RETURN_EXPR)
	break;

      if (TREE_CODE (stmt) != MODIFY_EXPR)
	return;

      if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
	return;
    }

  /* See if this is a tail call we can handle.  */
  ret_var = TREE_OPERAND (stmt, 0);
  if (ret_var
      && TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      tree ret_op = TREE_OPERAND (ret_var, 1);
      STRIP_NOPS (ret_op);
      if (!tail_recursion
	  && TREE_CODE (ret_op) != SSA_NAME)
	return;

      if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
	return;
      ret_var = TREE_OPERAND (ret_var, 0);
    }

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = xmalloc (sizeof (struct tailcall));

  nw->call_block = bb;
  nw->call_bsi = bsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Adjust the accumulator values according to A and M after BSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
{
  tree stmt, var, phi, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree a_acc_arg = a_acc, m_acc_arg = m_acc;

  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    {
	      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
			    build (MULT_EXPR, ret_type, m_acc, a));

	      tmp = create_tmp_var (ret_type, "acc_tmp");
	      add_referenced_tmp_var (tmp);

	      var = make_ssa_name (tmp, stmt);
	      TREE_OPERAND (stmt, 0) = var;
	      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
	    }
	}
      else
	var = a;

      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
		    build (PLUS_EXPR, ret_type, a_acc, var));
      var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      a_acc_arg = var;
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
		    build (MULT_EXPR, ret_type, m_acc, m));
      var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      m_acc_arg = var;
    }

  if (a_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
	if (PHI_RESULT (phi) == a_acc)
	  break;

      add_phi_arg (&phi, a_acc_arg, back);
    }

  if (m_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
	if (PHI_RESULT (phi) == m_acc)
	  break;

      add_phi_arg (&phi, m_acc_arg, back);
    }
}
/* Adjust the value of the return at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  block_stmt_iterator bsi = bsi_last (bb);

  gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);

  ret_var = TREE_OPERAND (ret_stmt, 0);
  if (!ret_var)
    return;

  if (TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
      bsi_replace (&bsi, ret_var, true);
      SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
      ret_var = TREE_OPERAND (ret_var, 0);
      ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
      bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
		    build (MULT_EXPR, ret_type, m_acc, ret_var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }
  else
    var = ret_var;

  if (a)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
		    build (PLUS_EXPR, ret_type, a_acc, var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }

  TREE_OPERAND (ret_stmt, 0) = var;
  modify_stmt (ret_stmt);
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, stmt, args, rslt, call;
  basic_block bb, first;
  edge e;
  tree phi;
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  unsigned i;
  block_stmt_iterator bsi;

  stmt = bsi_stmt (t->call_bsi);
  get_stmt_operands (stmt);
  ann = stmt_ann (stmt);
  bb = t->call_block;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    stmt = TREE_OPERAND (stmt, 1);

  first = EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest;

  /* Remove the code after call_bsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  bsi = t->call_bsi;
  bsi_next (&bsi);
  while (!bsi_end_p (bsi))
    {
      tree t = bsi_stmt (bsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (TREE_CODE (t) == RETURN_EXPR)
	break;

      bsi_remove (&bsi);
      release_defs (t);
    }

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (EDGE_SUCC (t->call_block, 0), first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL_TREE;

  /* Add phi node entries for arguments.  Not every PHI node corresponds to
     a function argument (there may be PHI nodes for virtual definitions of the
     eliminated calls), so we search for a PHI corresponding to each argument
     rather than searching for which argument a PHI node corresponds to.  */

  for (param = DECL_ARGUMENTS (current_function_decl),
       args = TREE_OPERAND (stmt, 1);
       param;
       param = TREE_CHAIN (param),
       args = TREE_CHAIN (args))
    {
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
	if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
	  break;

      /* The phi node indeed does not have to be there, in case the operand is
	 invariant in the function.  */
      if (!phi)
	continue;

      add_phi_arg (&phi, TREE_VALUE (args), e);
    }

  /* Add phi nodes for the call clobbered variables.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    {
      param = SSA_NAME_VAR (V_MAY_DEF_RESULT (v_may_defs, i));
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
	if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
	  break;

      if (!phi)
	{
	  tree name = var_ann (param)->default_def;
	  tree new_name;

	  if (!name)
	    {
	      /* It may happen that the tag does not have a default_def when
		 all uses of it are dominated by a MUST_DEF.  This however
		 means that it is not necessary to add a phi node for this
		 tag.  */
	      continue;
	    }
	  new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));

	  var_ann (param)->default_def = new_name;
	  phi = create_phi_node (name, first);
	  SSA_NAME_DEF_STMT (name) = phi;
	  add_phi_arg (&phi, new_name, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));

	  /* For all calls the same set of variables should be clobbered.  This
	     means that there always should be the appropriate phi node except
	     for the first time we eliminate the call.  */
	  gcc_assert (EDGE_COUNT (first->preds) <= 2);
	}

      add_phi_arg (&phi, V_MAY_DEF_OP (v_may_defs, i), e);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);

  call = bsi_stmt (t->call_bsi);
  if (TREE_CODE (call) == MODIFY_EXPR)
    {
      rslt = TREE_OPERAND (call, 0);

      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
    }

  bsi_remove (&t->call_bsi);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      tree stmt = bsi_stmt (t->call_bsi);

      stmt = get_call_expr_in (stmt);
      CALL_EXPR_TAILCALL (stmt) = 1;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_generic_expr (dump_file, stmt, dump_flags);
	  fprintf (dump_file, " in bb %i\n", t->call_block->index);
	}
    }

  return false;
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static void
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest;
  tree stmt, param, ret_type, tmp, phi;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && TREE_CODE (stmt) == RETURN_EXPR)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that there is only one predecessor of the block.  */
	  if (EDGE_COUNT (first->preds) > 1)
	    first = split_edge (EDGE_SUCC (ENTRY_BLOCK_PTR, 0));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = TREE_CHAIN (param))
	    if (var_ann (param)
		/* Also parameters that are only defined but never used need
		   not be copied.  */
		&& (var_ann (param)->default_def
		    && TREE_CODE (var_ann (param)->default_def) == SSA_NAME))
	      {
		tree name = var_ann (param)->default_def;
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		tree phi;

		var_ann (param)->default_def = new_name;
		phi = create_phi_node (name, first);
		SSA_NAME_DEF_STMT (name) = phi;
		add_phi_arg (&phi, new_name, EDGE_PRED (first, 0));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	{
	  ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

	  tmp = create_tmp_var (ret_type, "add_acc");
	  add_referenced_tmp_var (tmp);

	  phi = create_phi_node (tmp, first);
	  add_phi_arg (&phi, build_int_cst (ret_type, 0), EDGE_PRED (first, 0));
	  a_acc = PHI_RESULT (phi);
	}

      if (act->mult && !m_acc)
	{
	  ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

	  tmp = create_tmp_var (ret_type, "mult_acc");
	  add_referenced_tmp_var (tmp);

	  phi = create_phi_node (tmp, first);
	  add_phi_arg (&phi, build_int_cst (ret_type, 1), EDGE_PRED (first, 0));
	  m_acc = PHI_RESULT (phi);
	}
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && TREE_CODE (stmt) == RETURN_EXPR)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      free_dominance_info (CDI_DOMINATORS);
      cleanup_tree_cfg ();
    }
}
static void
execute_tail_recursion (void)
{
  tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0;
}

static void
execute_tail_calls (void)
{
  tree_optimize_tail_calls_1 (true);
}
struct tree_opt_pass pass_tail_recursion =
{
  "tailr",                              /* name */
  NULL,                                 /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};

struct tree_opt_pass pass_tail_calls =
{
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};