1 /* Tail call optimization on trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "diagnostic.h"
35 #include "tree-pass.h"
37 #include "langhooks.h"
39 /* The file implements the tail recursion elimination. It is also used to
40 analyze the tail calls in general, passing the results to the rtl level
41 where they are used for sibcall optimization.
43 In addition to the standard tail recursion elimination, we handle the most
44 trivial cases of making the call tail recursive by creating accumulators.
45 For example the following function
50 return n + sum (n - 1);
67 To do this, we maintain two accumulators (a_acc and m_acc) that indicate
68 when we reach the return x statement, we should return a_acc + x * m_acc
69 instead. They are initially initialized to 0 and 1, respectively,
70 so the semantics of the function is obviously preserved. If we are
71 guaranteed that the value of the accumulator never changes, we
74 There are three ways in which the function may exit.  The first one is
75 handled in adjust_return_value, the other two in adjust_accumulator_values
76 (the second case is actually a special case of the third one and we
77 present it separately just for clarity):
79 1) Just return x, where x is not in any of the remaining special shapes.
80 We rewrite this to a gimple equivalent of return m_acc * x + a_acc.
82 2) return f (...), where f is the current function, is rewritten in a
83 classical tail-recursion elimination way, into assignment of arguments
84 and jump to the start of the function. Values of the accumulators
87 3) return a + m * f(...), where a and m do not depend on call to f.
88 To preserve the semantics described before we want this to be rewritten
89 in such a way that we finally return
91 a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).
93 I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
94 eliminate the tail call to f. Special cases when the value is just
95 added or just multiplied are obtained by setting a = 0 or m = 1.
97 TODO -- it is possible to do similar tricks for other operations. */
99 /* A structure that describes the tailcall. */
103 /* The block in that the call occur. */
104 basic_block call_block
;
106 /* The iterator pointing to the call statement. */
107 block_stmt_iterator call_bsi
;
109 /* True if it is a call to the current function. */
112 /* The return value of the caller is mult * f + add, where f is the return
113 value of the call. */
116 /* Next tailcall in the chain. */
117 struct tailcall
*next
;
120 /* The variables holding the value of multiplicative and additive
122 static tree m_acc
, a_acc
;
124 static bool suitable_for_tail_opt_p (void);
125 static bool optimize_tail_call (struct tailcall
*, bool);
126 static void eliminate_tail_call (struct tailcall
*);
127 static void find_tail_calls (basic_block
, struct tailcall
**);
129 /* Returns false when the function is not suitable for tail call optimization
130 from some reason (e.g. if it takes variable number of arguments). */
133 suitable_for_tail_opt_p (void)
137 if (current_function_stdarg
)
140 /* No local variable should be call-clobbered. We ignore any kind
141 of memory tag, as these are not real variables. */
142 for (i
= 0; i
< (int) VARRAY_ACTIVE_SIZE (referenced_vars
); i
++)
144 tree var
= VARRAY_TREE (referenced_vars
, i
);
146 if (!(TREE_STATIC (var
) || DECL_EXTERNAL (var
))
147 && var_ann (var
)->mem_tag_kind
== NOT_A_TAG
148 && is_call_clobbered (var
))
154 /* Returns false when the function is not suitable for tail call optimization
155 from some reason (e.g. if it takes variable number of arguments).
156 This test must pass in addition to suitable_for_tail_opt_p in order to make
157 tail call discovery happen. */
160 suitable_for_tail_call_opt_p (void)
164 /* alloca (until we have stack slot life analysis) inhibits
165 sibling call optimizations, but not tail recursion. */
166 if (current_function_calls_alloca
)
169 /* If we are using sjlj exceptions, we may need to add a call to
170 _Unwind_SjLj_Unregister at exit of the function. Which means
171 that we cannot do any sibcall transformations. */
172 if (USING_SJLJ_EXCEPTIONS
&& current_function_has_exception_handlers ())
175 /* Any function that calls setjmp might have longjmp called from
176 any called function. ??? We really should represent this
177 properly in the CFG so that this needn't be special cased. */
178 if (current_function_calls_setjmp
)
181 /* ??? It is OK if the argument of a function is taken in some cases,
182 but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
183 for (param
= DECL_ARGUMENTS (current_function_decl
);
185 param
= TREE_CHAIN (param
))
186 if (TREE_ADDRESSABLE (param
))
192 /* Checks whether the expression EXPR in stmt AT is independent of the
193 statement pointed by BSI (in a sense that we already know EXPR's value
194 at BSI). We use the fact that we are only called from the chain of
195 basic blocks that have only single successor. Returns the expression
196 containing the value of EXPR at BSI. */
199 independent_of_stmt_p (tree expr
, tree at
, block_stmt_iterator bsi
)
201 basic_block bb
, call_bb
, at_bb
;
205 if (is_gimple_min_invariant (expr
))
208 if (TREE_CODE (expr
) != SSA_NAME
)
211 /* Mark the blocks in the chain leading to the end. */
212 at_bb
= bb_for_stmt (at
);
213 call_bb
= bb_for_stmt (bsi_stmt (bsi
));
214 for (bb
= call_bb
; bb
!= at_bb
; bb
= EDGE_SUCC (bb
, 0)->dest
)
220 at
= SSA_NAME_DEF_STMT (expr
);
221 bb
= bb_for_stmt (at
);
223 /* The default definition or defined before the chain. */
229 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
230 if (bsi_stmt (bsi
) == at
)
233 if (!bsi_end_p (bsi
))
238 if (TREE_CODE (at
) != PHI_NODE
)
244 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
249 expr
= PHI_ARG_DEF_FROM_EDGE (at
, e
);
250 if (TREE_CODE (expr
) != SSA_NAME
)
252 /* The value is a constant. */
257 /* Unmark the blocks. */
258 for (bb
= call_bb
; bb
!= at_bb
; bb
= EDGE_SUCC (bb
, 0)->dest
)
265 /* Simulates the effect of an assignment of ASS in STMT on the return value
266 of the tail recursive CALL passed in ASS_VAR. M and A are the
267 multiplicative and the additive factor for the real return value. */
270 process_assignment (tree ass
, tree stmt
, block_stmt_iterator call
, tree
*m
,
271 tree
*a
, tree
*ass_var
)
273 tree op0
, op1
, non_ass_var
;
274 tree dest
= TREE_OPERAND (ass
, 0);
275 tree src
= TREE_OPERAND (ass
, 1);
276 enum tree_code code
= TREE_CODE (src
);
279 /* See if this is a simple copy operation of an SSA name to the function
280 result. In that case we may have a simple tail call. Ignore type
281 conversions that can never produce extra code between the function
282 call and the function return. */
283 STRIP_NOPS (src_var
);
284 if (TREE_CODE (src_var
) == SSA_NAME
)
286 if (src_var
!= *ass_var
)
293 if (TREE_CODE_CLASS (code
) != tcc_binary
)
296 /* Accumulator optimizations will reverse the order of operations.
297 We can only do that for floating-point types if we're assuming
298 that addition and multiplication are associative. */
299 if (!flag_unsafe_math_optimizations
)
300 if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl
))))
303 /* We only handle the code like
310 TODO -- Extend it for cases where the linear transformation of the output
311 is expressed in a more complicated way. */
313 op0
= TREE_OPERAND (src
, 0);
314 op1
= TREE_OPERAND (src
, 1);
317 && (non_ass_var
= independent_of_stmt_p (op1
, stmt
, call
)))
319 else if (op1
== *ass_var
320 && (non_ass_var
= independent_of_stmt_p (op0
, stmt
, call
)))
328 /* There should be no previous addition. TODO -- it should be fairly
329 straightforward to lift this restriction -- just allow storing
330 more complicated expressions in *A, and gimplify it in
331 adjust_accumulator_values. */
339 /* Similar remark applies here. Handling multiplication after addition
340 is just slightly more complicated -- we need to multiply both *A and
348 /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR). */
355 /* Propagate VAR through phis on edge E. */
358 propagate_through_phis (tree var
, edge e
)
360 basic_block dest
= e
->dest
;
363 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
364 if (PHI_ARG_DEF_FROM_EDGE (phi
, e
) == var
)
365 return PHI_RESULT (phi
);
370 /* Finds tailcalls falling into basic block BB. The list of found tailcalls is
371 added to the start of RET. */
374 find_tail_calls (basic_block bb
, struct tailcall
**ret
)
376 tree ass_var
, ret_var
, stmt
, func
, param
, args
, call
= NULL_TREE
;
377 block_stmt_iterator bsi
, absi
;
385 if (EDGE_COUNT (bb
->succs
) > 1)
388 for (bsi
= bsi_last (bb
); !bsi_end_p (bsi
); bsi_prev (&bsi
))
390 stmt
= bsi_stmt (bsi
);
393 if (TREE_CODE (stmt
) == LABEL_EXPR
)
396 get_stmt_operands (stmt
);
398 /* Check for a call. */
399 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
401 ass_var
= TREE_OPERAND (stmt
, 0);
402 call
= TREE_OPERAND (stmt
, 1);
403 if (TREE_CODE (call
) == WITH_SIZE_EXPR
)
404 call
= TREE_OPERAND (call
, 0);
412 if (TREE_CODE (call
) == CALL_EXPR
)
415 /* If the statement has virtual or volatile operands, fail. */
416 ann
= stmt_ann (stmt
);
417 if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann
))
418 || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann
))
419 || NUM_VUSES (VUSE_OPS (ann
))
420 || ann
->has_volatile_ops
)
427 /* Recurse to the predecessors. */
428 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
429 find_tail_calls (e
->src
, ret
);
434 /* We found the call, check whether it is suitable. */
435 tail_recursion
= false;
436 func
= get_callee_fndecl (call
);
437 if (func
== current_function_decl
)
439 for (param
= DECL_ARGUMENTS (func
), args
= TREE_OPERAND (call
, 1);
441 param
= TREE_CHAIN (param
), args
= TREE_CHAIN (args
))
443 tree arg
= TREE_VALUE (args
);
446 /* Make sure there are no problems with copying. The parameter
447 have a copyable type and the two arguments must have reasonably
448 equivalent types. The latter requirement could be relaxed if
449 we emitted a suitable type conversion statement. */
450 if (!is_gimple_reg_type (TREE_TYPE (param
))
451 || !lang_hooks
.types_compatible_p (TREE_TYPE (param
),
455 /* The parameter should be a real operand, so that phi node
456 created for it at the start of the function has the meaning
457 of copying the value. This test implies is_gimple_reg_type
458 from the previous condition, however this one could be
459 relaxed by being more careful with copying the new value
460 of the parameter (emitting appropriate MODIFY_EXPR and
461 updating the virtual operands). */
462 if (!is_gimple_reg (param
))
467 tail_recursion
= true;
470 /* Now check the statements after the call. None of them has virtual
471 operands, so they may only depend on the call through its return
472 value. The return value should also be dependent on each of them,
473 since we are running after dce. */
483 while (bsi_end_p (absi
))
485 ass_var
= propagate_through_phis (ass_var
, EDGE_SUCC (abb
, 0));
486 abb
= EDGE_SUCC (abb
, 0)->dest
;
487 absi
= bsi_start (abb
);
490 stmt
= bsi_stmt (absi
);
492 if (TREE_CODE (stmt
) == LABEL_EXPR
)
495 if (TREE_CODE (stmt
) == RETURN_EXPR
)
498 if (TREE_CODE (stmt
) != MODIFY_EXPR
)
501 if (!process_assignment (stmt
, stmt
, bsi
, &m
, &a
, &ass_var
))
505 /* See if this is a tail call we can handle. */
506 ret_var
= TREE_OPERAND (stmt
, 0);
508 && TREE_CODE (ret_var
) == MODIFY_EXPR
)
510 tree ret_op
= TREE_OPERAND (ret_var
, 1);
513 && TREE_CODE (ret_op
) != SSA_NAME
)
516 if (!process_assignment (ret_var
, stmt
, bsi
, &m
, &a
, &ass_var
))
518 ret_var
= TREE_OPERAND (ret_var
, 0);
521 /* We may proceed if there either is no return value, or the return value
522 is identical to the call's return. */
524 && (ret_var
!= ass_var
))
527 /* If this is not a tail recursive call, we cannot handle addends or
529 if (!tail_recursion
&& (m
|| a
))
532 nw
= xmalloc (sizeof (struct tailcall
));
537 nw
->tail_recursion
= tail_recursion
;
546 /* Adjust the accumulator values according to A and M after BSI, and update
547 the phi nodes on edge BACK. */
550 adjust_accumulator_values (block_stmt_iterator bsi
, tree m
, tree a
, edge back
)
552 tree stmt
, var
, phi
, tmp
;
553 tree ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
554 tree a_acc_arg
= a_acc
, m_acc_arg
= m_acc
;
560 if (integer_onep (a
))
564 stmt
= build (MODIFY_EXPR
, ret_type
, NULL_TREE
,
565 build (MULT_EXPR
, ret_type
, m_acc
, a
));
567 tmp
= create_tmp_var (ret_type
, "acc_tmp");
568 add_referenced_tmp_var (tmp
);
570 var
= make_ssa_name (tmp
, stmt
);
571 TREE_OPERAND (stmt
, 0) = var
;
572 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
578 stmt
= build (MODIFY_EXPR
, ret_type
, NULL_TREE
,
579 build (PLUS_EXPR
, ret_type
, a_acc
, var
));
580 var
= make_ssa_name (SSA_NAME_VAR (a_acc
), stmt
);
581 TREE_OPERAND (stmt
, 0) = var
;
582 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
588 stmt
= build (MODIFY_EXPR
, ret_type
, NULL_TREE
,
589 build (MULT_EXPR
, ret_type
, m_acc
, m
));
590 var
= make_ssa_name (SSA_NAME_VAR (m_acc
), stmt
);
591 TREE_OPERAND (stmt
, 0) = var
;
592 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
598 for (phi
= phi_nodes (back
->dest
); phi
; phi
= PHI_CHAIN (phi
))
599 if (PHI_RESULT (phi
) == a_acc
)
602 add_phi_arg (phi
, a_acc_arg
, back
);
607 for (phi
= phi_nodes (back
->dest
); phi
; phi
= PHI_CHAIN (phi
))
608 if (PHI_RESULT (phi
) == m_acc
)
611 add_phi_arg (phi
, m_acc_arg
, back
);
615 /* Adjust value of the return at the end of BB according to M and A
619 adjust_return_value (basic_block bb
, tree m
, tree a
)
621 tree ret_stmt
= last_stmt (bb
), ret_var
, var
, stmt
, tmp
;
622 tree ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
623 block_stmt_iterator bsi
= bsi_last (bb
);
625 gcc_assert (TREE_CODE (ret_stmt
) == RETURN_EXPR
);
627 ret_var
= TREE_OPERAND (ret_stmt
, 0);
631 if (TREE_CODE (ret_var
) == MODIFY_EXPR
)
633 ret_var
->common
.ann
= (tree_ann_t
) stmt_ann (ret_stmt
);
634 bsi_replace (&bsi
, ret_var
, true);
635 SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var
, 0)) = ret_var
;
636 ret_var
= TREE_OPERAND (ret_var
, 0);
637 ret_stmt
= build1 (RETURN_EXPR
, TREE_TYPE (ret_stmt
), ret_var
);
638 bsi_insert_after (&bsi
, ret_stmt
, BSI_NEW_STMT
);
643 stmt
= build (MODIFY_EXPR
, ret_type
, NULL_TREE
,
644 build (MULT_EXPR
, ret_type
, m_acc
, ret_var
));
646 tmp
= create_tmp_var (ret_type
, "acc_tmp");
647 add_referenced_tmp_var (tmp
);
649 var
= make_ssa_name (tmp
, stmt
);
650 TREE_OPERAND (stmt
, 0) = var
;
651 bsi_insert_before (&bsi
, stmt
, BSI_SAME_STMT
);
658 stmt
= build (MODIFY_EXPR
, ret_type
, NULL_TREE
,
659 build (PLUS_EXPR
, ret_type
, a_acc
, var
));
661 tmp
= create_tmp_var (ret_type
, "acc_tmp");
662 add_referenced_tmp_var (tmp
);
664 var
= make_ssa_name (tmp
, stmt
);
665 TREE_OPERAND (stmt
, 0) = var
;
666 bsi_insert_before (&bsi
, stmt
, BSI_SAME_STMT
);
669 TREE_OPERAND (ret_stmt
, 0) = var
;
670 modify_stmt (ret_stmt
);
673 /* Eliminates tail call described by T. TMP_VARS is a list of
674 temporary variables used to copy the function arguments. */
677 eliminate_tail_call (struct tailcall
*t
)
679 tree param
, stmt
, args
, rslt
, call
;
680 basic_block bb
, first
;
684 v_may_def_optype v_may_defs
;
686 block_stmt_iterator bsi
;
688 stmt
= bsi_stmt (t
->call_bsi
);
689 get_stmt_operands (stmt
);
690 ann
= stmt_ann (stmt
);
693 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
695 fprintf (dump_file
, "Eliminated tail recursion in bb %d : ",
697 print_generic_stmt (dump_file
, stmt
, TDF_SLIM
);
698 fprintf (dump_file
, "\n");
701 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
702 stmt
= TREE_OPERAND (stmt
, 1);
704 first
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->dest
;
706 /* Remove the code after call_bsi that will become unreachable. The
707 possibly unreachable code in other blocks is removed later in
711 while (!bsi_end_p (bsi
))
713 tree t
= bsi_stmt (bsi
);
714 /* Do not remove the return statement, so that redirect_edge_and_branch
715 sees how the block ends. */
716 if (TREE_CODE (t
) == RETURN_EXPR
)
723 /* Replace the call by a jump to the start of function. */
724 e
= redirect_edge_and_branch (EDGE_SUCC (t
->call_block
, 0), first
);
726 PENDING_STMT (e
) = NULL_TREE
;
728 /* Add phi node entries for arguments. Not every PHI node corresponds to
729 a function argument (there may be PHI nodes for virtual definitions of the
730 eliminated calls), so we search for a PHI corresponding to each argument
731 rather than searching for which argument a PHI node corresponds to. */
733 for (param
= DECL_ARGUMENTS (current_function_decl
),
734 args
= TREE_OPERAND (stmt
, 1);
736 param
= TREE_CHAIN (param
),
737 args
= TREE_CHAIN (args
))
740 for (phi
= phi_nodes (first
); phi
; phi
= PHI_CHAIN (phi
))
741 if (param
== SSA_NAME_VAR (PHI_RESULT (phi
)))
744 /* The phi node indeed does not have to be there, in case the operand is
745 invariant in the function. */
749 add_phi_arg (phi
, TREE_VALUE (args
), e
);
752 /* Add phi nodes for the call clobbered variables. */
753 v_may_defs
= V_MAY_DEF_OPS (ann
);
754 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
756 param
= SSA_NAME_VAR (V_MAY_DEF_RESULT (v_may_defs
, i
));
757 for (phi
= phi_nodes (first
); phi
; phi
= PHI_CHAIN (phi
))
758 if (param
== SSA_NAME_VAR (PHI_RESULT (phi
)))
763 tree name
= var_ann (param
)->default_def
;
768 /* It may happen that the tag does not have a default_def in case
769 when all uses of it are dominated by a MUST_DEF. This however
770 means that it is not necessary to add a phi node for this
774 new_name
= make_ssa_name (param
, SSA_NAME_DEF_STMT (name
));
776 var_ann (param
)->default_def
= new_name
;
777 phi
= create_phi_node (name
, first
);
778 SSA_NAME_DEF_STMT (name
) = phi
;
779 add_phi_arg (phi
, new_name
, EDGE_SUCC (ENTRY_BLOCK_PTR
, 0));
781 /* For all calls the same set of variables should be clobbered. This
782 means that there always should be the appropriate phi node except
783 for the first time we eliminate the call. */
784 gcc_assert (EDGE_COUNT (first
->preds
) <= 2);
787 add_phi_arg (phi
, V_MAY_DEF_OP (v_may_defs
, i
), e
);
790 /* Update the values of accumulators. */
791 adjust_accumulator_values (t
->call_bsi
, t
->mult
, t
->add
, e
);
793 call
= bsi_stmt (t
->call_bsi
);
794 if (TREE_CODE (call
) == MODIFY_EXPR
)
796 rslt
= TREE_OPERAND (call
, 0);
798 /* Result of the call will no longer be defined. So adjust the
799 SSA_NAME_DEF_STMT accordingly. */
800 SSA_NAME_DEF_STMT (rslt
) = build_empty_stmt ();
803 bsi_remove (&t
->call_bsi
);
807 /* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also
808 mark the tailcalls for the sibcall optimization. */
811 optimize_tail_call (struct tailcall
*t
, bool opt_tailcalls
)
813 if (t
->tail_recursion
)
815 eliminate_tail_call (t
);
821 tree stmt
= bsi_stmt (t
->call_bsi
);
823 stmt
= get_call_expr_in (stmt
);
824 CALL_EXPR_TAILCALL (stmt
) = 1;
825 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
827 fprintf (dump_file
, "Found tail call ");
828 print_generic_expr (dump_file
, stmt
, dump_flags
);
829 fprintf (dump_file
, " in bb %i\n", t
->call_block
->index
);
836 /* Optimizes tail calls in the function, turning the tail recursion
840 tree_optimize_tail_calls_1 (bool opt_tailcalls
)
843 bool phis_constructed
= false;
844 struct tailcall
*tailcalls
= NULL
, *act
, *next
;
845 bool changed
= false;
846 basic_block first
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->dest
;
847 tree stmt
, param
, ret_type
, tmp
, phi
;
850 if (!suitable_for_tail_opt_p ())
853 opt_tailcalls
= suitable_for_tail_call_opt_p ();
855 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
857 /* Only traverse the normal exits, i.e. those that end with return
859 stmt
= last_stmt (e
->src
);
862 && TREE_CODE (stmt
) == RETURN_EXPR
)
863 find_tail_calls (e
->src
, &tailcalls
);
866 /* Construct the phi nodes and accumulators if necessary. */
867 a_acc
= m_acc
= NULL_TREE
;
868 for (act
= tailcalls
; act
; act
= act
->next
)
870 if (!act
->tail_recursion
)
873 if (!phis_constructed
)
875 /* Ensure that there is only one predecessor of the block. */
876 if (EDGE_COUNT (first
->preds
) > 1)
877 first
= split_edge (EDGE_SUCC (ENTRY_BLOCK_PTR
, 0));
879 /* Copy the args if needed. */
880 for (param
= DECL_ARGUMENTS (current_function_decl
);
882 param
= TREE_CHAIN (param
))
883 if (is_gimple_reg (param
)
885 /* Also parameters that are only defined but never used need not
887 && (var_ann (param
)->default_def
888 && TREE_CODE (var_ann (param
)->default_def
) == SSA_NAME
))
890 tree name
= var_ann (param
)->default_def
;
891 tree new_name
= make_ssa_name (param
, SSA_NAME_DEF_STMT (name
));
894 var_ann (param
)->default_def
= new_name
;
895 phi
= create_phi_node (name
, first
);
896 SSA_NAME_DEF_STMT (name
) = phi
;
897 add_phi_arg (phi
, new_name
, EDGE_PRED (first
, 0));
899 phis_constructed
= true;
902 if (act
->add
&& !a_acc
)
904 ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
906 tmp
= create_tmp_var (ret_type
, "add_acc");
907 add_referenced_tmp_var (tmp
);
909 phi
= create_phi_node (tmp
, first
);
911 /* RET_TYPE can be a float when -ffast-maths is
913 fold_convert (ret_type
, integer_zero_node
),
914 EDGE_PRED (first
, 0));
915 a_acc
= PHI_RESULT (phi
);
918 if (act
->mult
&& !m_acc
)
920 ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
922 tmp
= create_tmp_var (ret_type
, "mult_acc");
923 add_referenced_tmp_var (tmp
);
925 phi
= create_phi_node (tmp
, first
);
927 /* RET_TYPE can be a float when -ffast-maths is
929 fold_convert (ret_type
, integer_one_node
),
930 EDGE_PRED (first
, 0));
931 m_acc
= PHI_RESULT (phi
);
935 for (; tailcalls
; tailcalls
= next
)
937 next
= tailcalls
->next
;
938 changed
|= optimize_tail_call (tailcalls
, opt_tailcalls
);
944 /* Modify the remaining return statements. */
945 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
947 stmt
= last_stmt (e
->src
);
950 && TREE_CODE (stmt
) == RETURN_EXPR
)
951 adjust_return_value (e
->src
, m_acc
, a_acc
);
957 free_dominance_info (CDI_DOMINATORS
);
963 execute_tail_recursion (void)
965 tree_optimize_tail_calls_1 (false);
969 gate_tail_calls (void)
971 return flag_optimize_sibling_calls
!= 0;
975 execute_tail_calls (void)
977 tree_optimize_tail_calls_1 (true);
980 struct tree_opt_pass pass_tail_recursion
=
984 execute_tail_recursion
, /* execute */
987 0, /* static_pass_number */
989 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
990 0, /* properties_provided */
991 0, /* properties_destroyed */
992 0, /* todo_flags_start */
993 TODO_dump_func
| TODO_verify_ssa
, /* todo_flags_finish */
997 struct tree_opt_pass pass_tail_calls
=
1000 gate_tail_calls
, /* gate */
1001 execute_tail_calls
, /* execute */
1004 0, /* static_pass_number */
1006 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
1007 0, /* properties_provided */
1008 0, /* properties_destroyed */
1009 0, /* todo_flags_start */
1010 TODO_dump_func
| TODO_verify_ssa
, /* todo_flags_finish */