/* Tail call optimization on trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) indicating
   that, when we reach a return x statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we can
   guarantee that the value of an accumulator never changes, we omit it.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one, and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into an assignment of the
      arguments and a jump to the start of the function.  The values of the
      accumulators are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  The special cases in which the value is
      just added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
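
/* As a concrete illustration of case 3 (a sketch that merely instantiates
   the algebra above; it adds no functionality beyond it), consider

   int fac (int n)
   {
     if (n > 1)
       return n * fac (n - 1);
     else
       return 1;
   }

   Here a = 0 and m = n, so eliminating the recursive call leaves a_acc
   untouched (it grows by a * m_acc = 0) and multiplies m_acc by n.  Starting
   from a_acc = 0 and m_acc = 1, by the time the rewritten loop reaches the
   "return 1" statement m_acc holds n * (n - 1) * ... * 2, and case 1 turns
   that return into return m_acc * 1 + a_acc, i.e. the factorial of the
   original argument.  */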
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The block in which the call occurs.  */
  basic_block call_block;

  /* The iterator pointing to the call statement.  */
  block_stmt_iterator call_bsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of the multiplicative and the additive
   accumulator.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */
static bool
suitable_for_tail_opt_p (void)
{
  int i;

  if (current_function_stdarg)
    return false;

  /* No local variable should be call-clobbered.  We ignore any kind
     of memory tag, as these are not real variables.  */
  for (i = 0; i < (int) VARRAY_ACTIVE_SIZE (referenced_vars); i++)
    {
      tree var = VARRAY_TREE (referenced_vars, i);

      if (!(TREE_STATIC (var) || DECL_EXTERNAL (var))
          && var_ann (var)->mem_tag_kind == NOT_A_TAG
          && is_call_clobbered (var))
        return false;
    }

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (current_function_calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at the exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (current_function_calls_setjmp)
    return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by BSI (in the sense that we already know EXPR's value
   at BSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at BSI.  */

static tree
independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = bb_for_stmt (at);
  call_bb = bb_for_stmt (bsi_stmt (bsi));
  for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = bb_for_stmt (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !bsi_end_p (bsi); bsi_next (&bsi))
            if (bsi_stmt (bsi) == at)
              break;

          if (!bsi_end_p (bsi))
            expr = NULL_TREE;
          break;
        }

      if (TREE_CODE (at) != PHI_NODE)
        {
          expr = NULL_TREE;
          break;
        }

      for (e = bb->pred; e; e = e->pred_next)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment of ASS in STMT on the return value
   of the tail recursive CALL passed in ASS_VAR.  M and A are the
   multiplicative and the additive factor for the real return value.  */

static bool
process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = TREE_OPERAND (ass, 0);
  tree src = TREE_OPERAND (ass, 1);
  enum tree_code code = TREE_CODE (src);
  tree src_var = src;

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  STRIP_NOPS (src_var);
  if (TREE_CODE (src_var) == SSA_NAME)
    {
      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_unsafe_math_optimizations)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

       x = call ();
       y = m * x;
       z = y + a;
       return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */
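
  /* In the example above, find_tail_calls visits the statements following the
     call in order, so this function first sees "y = m * x" (recording M and
     advancing *ASS_VAR to y) and then "z = y + a" (recording A and advancing
     *ASS_VAR to z); the MULT_EXPR case below therefore only fires while both
     accumulated factors are still unset.  */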
  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      /* There should be no previous addition.  TODO -- it should be fairly
         straightforward to lift this restriction -- just allow storing
         more complicated expressions in *A, and gimplify it in
         adjust_accumulator_values.  */
      if (*a)
        return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      /* Similar remark applies here.  Handling multiplication after addition
         is just slightly more complicated -- we need to multiply both *A and
         *M.  */
      if (*a || *m)
        return false;
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR).  */

    default:
      return false;
    }
}
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
      return PHI_RESULT (phi);

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
  block_stmt_iterator bsi, absi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  stmt_ann_t ann;

  if (bb->succ->succ_next)
    return;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      stmt = bsi_stmt (bsi);

      /* Ignore labels.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      get_stmt_operands (stmt);

      /* Check for a call.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
        {
          ass_var = TREE_OPERAND (stmt, 0);
          call = TREE_OPERAND (stmt, 1);
          if (TREE_CODE (call) == WITH_SIZE_EXPR)
            call = TREE_OPERAND (call, 0);
        }
      else
        {
          ass_var = NULL_TREE;
          call = stmt;
        }

      if (TREE_CODE (call) == CALL_EXPR)
        break;

      /* If the statement has virtual or volatile operands, fail.  */
      ann = stmt_ann (stmt);
      if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann))
          || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann))
          || NUM_VUSES (VUSE_OPS (ann))
          || ann->has_volatile_ops)
        return;
    }

  if (bsi_end_p (bsi))
    {
      /* Recurse to the predecessors.  */
      for (e = bb->pred; e; e = e->pred_next)
        find_tail_calls (e->src, ret);

      return;
    }

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = get_callee_fndecl (call);
  if (func == current_function_decl)
    {
      for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
           param && args;
           param = TREE_CHAIN (param), args = TREE_CHAIN (args))
        {
          tree arg = TREE_VALUE (args);
          if (param != arg
              /* Make sure there are no problems with copying.  Note we must
                 have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              && (!is_gimple_reg_type (TREE_TYPE (param))
                  || !lang_hooks.types_compatible_p (TREE_TYPE (param),
                                                     TREE_TYPE (arg))))
            break;
        }
      if (!args && !param)
        tail_recursion = true;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  absi = bsi;
  while (1)
    {
      bsi_next (&absi);

      while (bsi_end_p (absi))
        {
          ass_var = propagate_through_phis (ass_var, abb->succ);
          abb = abb->succ->dest;
          absi = bsi_start (abb);
        }

      stmt = bsi_stmt (absi);

      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      if (TREE_CODE (stmt) == RETURN_EXPR)
        break;

      if (TREE_CODE (stmt) != MODIFY_EXPR)
        return;

      if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
        return;
    }

  /* See if this is a tail call we can handle.  */
  ret_var = TREE_OPERAND (stmt, 0);
  if (ret_var
      && TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      tree ret_op = TREE_OPERAND (ret_var, 1);
      STRIP_NOPS (ret_op);
      if (!tail_recursion
          && TREE_CODE (ret_op) != SSA_NAME)
        return;

      if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
        return;
      ret_var = TREE_OPERAND (ret_var, 0);
    }

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = xmalloc (sizeof (struct tailcall));

  nw->call_block = bb;
  nw->call_bsi = bsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Adjust the accumulator values according to A and M after BSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
{
  tree stmt, var, phi, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree a_acc_arg = a_acc, m_acc_arg = m_acc;

  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            {
              stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                            build (MULT_EXPR, ret_type, m_acc, a));

              tmp = create_tmp_var (ret_type, "acc_tmp");
              add_referenced_tmp_var (tmp);

              var = make_ssa_name (tmp, stmt);
              TREE_OPERAND (stmt, 0) = var;
              bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
            }
        }
      else
        var = a;

      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (PLUS_EXPR, ret_type, a_acc, var));
      var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      a_acc_arg = var;
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (MULT_EXPR, ret_type, m_acc, m));
      var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      m_acc_arg = var;
    }

  if (a_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == a_acc)
          break;

      add_phi_arg (&phi, a_acc_arg, back);
    }

  if (m_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == m_acc)
          break;

      add_phi_arg (&phi, m_acc_arg, back);
    }
}
/* Adjust the value of the return statement at the end of BB according to the
   M and A accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  block_stmt_iterator bsi = bsi_last (bb);

  gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);

  ret_var = TREE_OPERAND (ret_stmt, 0);
  if (!ret_var)
    return;

  if (TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
      bsi_replace (&bsi, ret_var, true);
      SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
      ret_var = TREE_OPERAND (ret_var, 0);
      ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
      bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (MULT_EXPR, ret_type, m_acc, ret_var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }
  else
    var = ret_var;

  if (a)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (PLUS_EXPR, ret_type, a_acc, var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
    }

  TREE_OPERAND (ret_stmt, 0) = var;
  modify_stmt (ret_stmt);
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, stmt, args, rslt, call;
  basic_block bb, first;
  edge e;
  tree phi;
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  unsigned i;
  block_stmt_iterator bsi;

  stmt = bsi_stmt (t->call_bsi);
  get_stmt_operands (stmt);
  ann = stmt_ann (stmt);
  bb = t->call_block;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    stmt = TREE_OPERAND (stmt, 1);

  first = ENTRY_BLOCK_PTR->succ->dest;

  /* Remove the code after call_bsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  bsi = t->call_bsi;
  bsi_next (&bsi);
  while (!bsi_end_p (bsi))
    {
      tree t = bsi_stmt (bsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (TREE_CODE (t) == RETURN_EXPR)
        break;

      bsi_remove (&bsi);
      release_defs (t);
    }

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (t->call_block->succ, first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL_TREE;

  /* Add phi node entries for arguments.  Not every PHI node corresponds to
     a function argument (there may be PHI nodes for virtual definitions of
     the eliminated calls), so we search for a PHI corresponding to each
     argument rather than searching for which argument a PHI node corresponds
     to.  */

  for (param = DECL_ARGUMENTS (current_function_decl),
       args = TREE_OPERAND (stmt, 1);
       param;
       param = TREE_CHAIN (param),
       args = TREE_CHAIN (args))
    {
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
        if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
          break;

      /* The phi node indeed does not have to be there, in case the operand is
         invariant in the function.  */
      if (!phi)
        continue;

      add_phi_arg (&phi, TREE_VALUE (args), e);
    }

  /* Add phi nodes for the call clobbered variables.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    {
      param = SSA_NAME_VAR (V_MAY_DEF_RESULT (v_may_defs, i));
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
        if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
          break;

      if (!phi)
        {
          tree name = var_ann (param)->default_def;
          tree new_name;

          if (!name)
            {
              /* It may happen that the tag does not have a default_def in
                 case all uses of it are dominated by a MUST_DEF.  This
                 however means that it is not necessary to add a phi node for
                 this tag.  */
              continue;
            }
          new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));

          var_ann (param)->default_def = new_name;
          phi = create_phi_node (name, first);
          SSA_NAME_DEF_STMT (name) = phi;
          add_phi_arg (&phi, new_name, ENTRY_BLOCK_PTR->succ);

          /* For all calls the same set of variables should be clobbered.
             This means that there always should be the appropriate phi node
             except for the first time we eliminate the call.  */
          gcc_assert (!first->pred->pred_next->pred_next);
        }

      add_phi_arg (&phi, V_MAY_DEF_OP (v_may_defs, i), e);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);

  call = bsi_stmt (t->call_bsi);
  if (TREE_CODE (call) == MODIFY_EXPR)
    {
      rslt = TREE_OPERAND (call, 0);

      /* The result of the call will no longer be defined, so adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
    }

  bsi_remove (&t->call_bsi);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   marks the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      tree stmt = bsi_stmt (t->call_bsi);

      stmt = get_call_expr_in (stmt);
      CALL_EXPR_TAILCALL (stmt) = 1;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_generic_expr (dump_file, stmt, dump_flags);
          fprintf (dump_file, " in bb %i\n", t->call_block->index);
        }
    }

  return false;
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static void
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = ENTRY_BLOCK_PTR->succ->dest;
  tree stmt, param, ret_type, tmp, phi;

  if (!suitable_for_tail_opt_p ())
    return;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && TREE_CODE (stmt) == RETURN_EXPR)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that there is only one predecessor of the block.  */
          if (first->pred->pred_next)
            first = split_edge (ENTRY_BLOCK_PTR->succ);

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = TREE_CHAIN (param))
            if (var_ann (param)
                /* Also parameters that are only defined but never used need
                   not be copied.  */
                && (var_ann (param)->default_def
                    && TREE_CODE (var_ann (param)->default_def) == SSA_NAME))
              {
                tree name = var_ann (param)->default_def;
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                tree phi;

                var_ann (param)->default_def = new_name;
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (&phi, new_name, first->pred);
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "add_acc");
          add_referenced_tmp_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (&phi, build_int_cst (ret_type, 0), first->pred);
          a_acc = PHI_RESULT (phi);
        }

      if (act->mult && !m_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "mult_acc");
          add_referenced_tmp_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (&phi, build_int_cst (ret_type, 1), first->pred);
          m_acc = PHI_RESULT (phi);
        }
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && TREE_CODE (stmt) == RETURN_EXPR)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    {
      free_dominance_info (CDI_DOMINATORS);
      cleanup_tree_cfg ();
    }
}
static void
execute_tail_recursion (void)
{
  tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0;
}

static void
execute_tail_calls (void)
{
  tree_optimize_tail_calls_1 (true);
}
struct tree_opt_pass pass_tail_recursion =
{
  "tailr",                              /* name */
  NULL,                                 /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};

struct tree_opt_pass pass_tail_calls =
{
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};