1 /* Tail call optimization on trees.
2 Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "diagnostic.h"
34 #include "tree-pass.h"
36 #include "langhooks.h"
/* The file implements the tail recursion elimination.  It is also used to
   analyze the tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   when we reach the return x statement, we should return a_acc + x * m_acc
   instead.  They are initially initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never change, we
   omit the accumulator.

   There are three cases how the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
98 /* A structure that describes the tailcall. */
102 /* The block in that the call occur. */
103 basic_block call_block
;
105 /* The iterator pointing to the call statement. */
106 block_stmt_iterator call_bsi
;
108 /* True if it is a call to the current function. */
111 /* The return value of the caller is mult * f + add, where f is the return
112 value of the call. */
115 /* Next tailcall in the chain. */
116 struct tailcall
*next
;
119 /* The variables holding the value of multiplicative and additive
121 static tree m_acc
, a_acc
;
123 static bool suitable_for_tail_opt_p (void);
124 static bool optimize_tail_call (struct tailcall
*, bool);
125 static void eliminate_tail_call (struct tailcall
*);
126 static void find_tail_calls (basic_block
, struct tailcall
**);
128 /* Returns false when the function is not suitable for tail call optimization
129 from some reason (e.g. if it takes variable number of arguments). */
132 suitable_for_tail_opt_p (void)
134 referenced_var_iterator rvi
;
137 if (current_function_stdarg
)
140 /* No local variable nor structure field should be call-clobbered. We
141 ignore any kind of memory tag, as these are not real variables. */
143 FOR_EACH_REFERENCED_VAR (var
, rvi
)
146 if (!is_global_var (var
)
147 && (!MTAG_P (var
) || TREE_CODE (var
) == STRUCT_FIELD_TAG
)
148 && is_call_clobbered (var
))
154 /* Returns false when the function is not suitable for tail call optimization
155 from some reason (e.g. if it takes variable number of arguments).
156 This test must pass in addition to suitable_for_tail_opt_p in order to make
157 tail call discovery happen. */
160 suitable_for_tail_call_opt_p (void)
164 /* alloca (until we have stack slot life analysis) inhibits
165 sibling call optimizations, but not tail recursion. */
166 if (current_function_calls_alloca
)
169 /* If we are using sjlj exceptions, we may need to add a call to
170 _Unwind_SjLj_Unregister at exit of the function. Which means
171 that we cannot do any sibcall transformations. */
172 if (USING_SJLJ_EXCEPTIONS
&& current_function_has_exception_handlers ())
175 /* Any function that calls setjmp might have longjmp called from
176 any called function. ??? We really should represent this
177 properly in the CFG so that this needn't be special cased. */
178 if (current_function_calls_setjmp
)
181 /* ??? It is OK if the argument of a function is taken in some cases,
182 but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
183 for (param
= DECL_ARGUMENTS (current_function_decl
);
185 param
= TREE_CHAIN (param
))
186 if (TREE_ADDRESSABLE (param
))
192 /* Checks whether the expression EXPR in stmt AT is independent of the
193 statement pointed to by BSI (in a sense that we already know EXPR's value
194 at BSI). We use the fact that we are only called from the chain of
195 basic blocks that have only single successor. Returns the expression
196 containing the value of EXPR at BSI. */
199 independent_of_stmt_p (tree expr
, tree at
, block_stmt_iterator bsi
)
201 basic_block bb
, call_bb
, at_bb
;
205 if (is_gimple_min_invariant (expr
))
208 if (TREE_CODE (expr
) != SSA_NAME
)
211 /* Mark the blocks in the chain leading to the end. */
212 at_bb
= bb_for_stmt (at
);
213 call_bb
= bb_for_stmt (bsi_stmt (bsi
));
214 for (bb
= call_bb
; bb
!= at_bb
; bb
= single_succ (bb
))
220 at
= SSA_NAME_DEF_STMT (expr
);
221 bb
= bb_for_stmt (at
);
223 /* The default definition or defined before the chain. */
229 for (; !bsi_end_p (bsi
); bsi_next (&bsi
))
230 if (bsi_stmt (bsi
) == at
)
233 if (!bsi_end_p (bsi
))
238 if (TREE_CODE (at
) != PHI_NODE
)
244 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
249 expr
= PHI_ARG_DEF_FROM_EDGE (at
, e
);
250 if (TREE_CODE (expr
) != SSA_NAME
)
252 /* The value is a constant. */
257 /* Unmark the blocks. */
258 for (bb
= call_bb
; bb
!= at_bb
; bb
= single_succ (bb
))
265 /* Simulates the effect of an assignment of ASS in STMT on the return value
266 of the tail recursive CALL passed in ASS_VAR. M and A are the
267 multiplicative and the additive factor for the real return value. */
270 process_assignment (tree ass
, tree stmt
, block_stmt_iterator call
, tree
*m
,
271 tree
*a
, tree
*ass_var
)
273 tree op0
, op1
, non_ass_var
;
274 tree dest
= TREE_OPERAND (ass
, 0);
275 tree src
= TREE_OPERAND (ass
, 1);
276 enum tree_code code
= TREE_CODE (src
);
279 /* See if this is a simple copy operation of an SSA name to the function
280 result. In that case we may have a simple tail call. Ignore type
281 conversions that can never produce extra code between the function
282 call and the function return. */
283 STRIP_NOPS (src_var
);
284 if (TREE_CODE (src_var
) == SSA_NAME
)
286 if (src_var
!= *ass_var
)
293 if (TREE_CODE_CLASS (code
) != tcc_binary
)
296 /* Accumulator optimizations will reverse the order of operations.
297 We can only do that for floating-point types if we're assuming
298 that addition and multiplication are associative. */
299 if (!flag_unsafe_math_optimizations
)
300 if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl
))))
303 /* We only handle the code like
310 TODO -- Extend it for cases where the linear transformation of the output
311 is expressed in a more complicated way. */
313 op0
= TREE_OPERAND (src
, 0);
314 op1
= TREE_OPERAND (src
, 1);
317 && (non_ass_var
= independent_of_stmt_p (op1
, stmt
, call
)))
319 else if (op1
== *ass_var
320 && (non_ass_var
= independent_of_stmt_p (op0
, stmt
, call
)))
328 /* There should be no previous addition. TODO -- it should be fairly
329 straightforward to lift this restriction -- just allow storing
330 more complicated expressions in *A, and gimplify it in
331 adjust_accumulator_values. */
339 /* Similar remark applies here. Handling multiplication after addition
340 is just slightly more complicated -- we need to multiply both *A and
348 /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR). */
355 /* Propagate VAR through phis on edge E. */
358 propagate_through_phis (tree var
, edge e
)
360 basic_block dest
= e
->dest
;
363 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
364 if (PHI_ARG_DEF_FROM_EDGE (phi
, e
) == var
)
365 return PHI_RESULT (phi
);
370 /* Finds tailcalls falling into basic block BB. The list of found tailcalls is
371 added to the start of RET. */
374 find_tail_calls (basic_block bb
, struct tailcall
**ret
)
376 tree ass_var
, ret_var
, stmt
, func
, param
, args
, call
= NULL_TREE
;
377 block_stmt_iterator bsi
, absi
;
385 if (!single_succ_p (bb
))
388 for (bsi
= bsi_last (bb
); !bsi_end_p (bsi
); bsi_prev (&bsi
))
390 stmt
= bsi_stmt (bsi
);
393 if (TREE_CODE (stmt
) == LABEL_EXPR
)
396 /* Check for a call. */
397 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
399 ass_var
= TREE_OPERAND (stmt
, 0);
400 call
= TREE_OPERAND (stmt
, 1);
401 if (TREE_CODE (call
) == WITH_SIZE_EXPR
)
402 call
= TREE_OPERAND (call
, 0);
410 if (TREE_CODE (call
) == CALL_EXPR
)
413 /* If the statement has virtual or volatile operands, fail. */
414 ann
= stmt_ann (stmt
);
415 if (!ZERO_SSA_OPERANDS (stmt
, (SSA_OP_VUSE
| SSA_OP_VIRTUAL_DEFS
))
416 || ann
->has_volatile_ops
)
423 /* Recurse to the predecessors. */
424 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
425 find_tail_calls (e
->src
, ret
);
430 /* We found the call, check whether it is suitable. */
431 tail_recursion
= false;
432 func
= get_callee_fndecl (call
);
433 if (func
== current_function_decl
)
435 for (param
= DECL_ARGUMENTS (func
), args
= TREE_OPERAND (call
, 1);
437 param
= TREE_CHAIN (param
), args
= TREE_CHAIN (args
))
439 tree arg
= TREE_VALUE (args
);
442 /* Make sure there are no problems with copying. The parameter
443 have a copyable type and the two arguments must have reasonably
444 equivalent types. The latter requirement could be relaxed if
445 we emitted a suitable type conversion statement. */
446 if (!is_gimple_reg_type (TREE_TYPE (param
))
447 || !lang_hooks
.types_compatible_p (TREE_TYPE (param
),
451 /* The parameter should be a real operand, so that phi node
452 created for it at the start of the function has the meaning
453 of copying the value. This test implies is_gimple_reg_type
454 from the previous condition, however this one could be
455 relaxed by being more careful with copying the new value
456 of the parameter (emitting appropriate MODIFY_EXPR and
457 updating the virtual operands). */
458 if (!is_gimple_reg (param
))
463 tail_recursion
= true;
466 /* Now check the statements after the call. None of them has virtual
467 operands, so they may only depend on the call through its return
468 value. The return value should also be dependent on each of them,
469 since we are running after dce. */
479 while (bsi_end_p (absi
))
481 ass_var
= propagate_through_phis (ass_var
, single_succ_edge (abb
));
482 abb
= single_succ (abb
);
483 absi
= bsi_start (abb
);
486 stmt
= bsi_stmt (absi
);
488 if (TREE_CODE (stmt
) == LABEL_EXPR
)
491 if (TREE_CODE (stmt
) == RETURN_EXPR
)
494 if (TREE_CODE (stmt
) != MODIFY_EXPR
)
497 if (!process_assignment (stmt
, stmt
, bsi
, &m
, &a
, &ass_var
))
501 /* See if this is a tail call we can handle. */
502 ret_var
= TREE_OPERAND (stmt
, 0);
504 && TREE_CODE (ret_var
) == MODIFY_EXPR
)
506 tree ret_op
= TREE_OPERAND (ret_var
, 1);
509 && TREE_CODE (ret_op
) != SSA_NAME
)
512 if (!process_assignment (ret_var
, stmt
, bsi
, &m
, &a
, &ass_var
))
514 ret_var
= TREE_OPERAND (ret_var
, 0);
517 /* We may proceed if there either is no return value, or the return value
518 is identical to the call's return. */
520 && (ret_var
!= ass_var
))
523 /* If this is not a tail recursive call, we cannot handle addends or
525 if (!tail_recursion
&& (m
|| a
))
528 nw
= XNEW (struct tailcall
);
533 nw
->tail_recursion
= tail_recursion
;
542 /* Adjust the accumulator values according to A and M after BSI, and update
543 the phi nodes on edge BACK. */
546 adjust_accumulator_values (block_stmt_iterator bsi
, tree m
, tree a
, edge back
)
548 tree stmt
, var
, phi
, tmp
;
549 tree ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
550 tree a_acc_arg
= a_acc
, m_acc_arg
= m_acc
;
556 if (integer_onep (a
))
560 stmt
= build2 (MODIFY_EXPR
, ret_type
, NULL_TREE
,
561 build2 (MULT_EXPR
, ret_type
, m_acc
, a
));
563 tmp
= create_tmp_var (ret_type
, "acc_tmp");
564 add_referenced_var (tmp
);
566 var
= make_ssa_name (tmp
, stmt
);
567 TREE_OPERAND (stmt
, 0) = var
;
568 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
574 stmt
= build2 (MODIFY_EXPR
, ret_type
, NULL_TREE
,
575 build2 (PLUS_EXPR
, ret_type
, a_acc
, var
));
576 var
= make_ssa_name (SSA_NAME_VAR (a_acc
), stmt
);
577 TREE_OPERAND (stmt
, 0) = var
;
578 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
584 stmt
= build2 (MODIFY_EXPR
, ret_type
, NULL_TREE
,
585 build2 (MULT_EXPR
, ret_type
, m_acc
, m
));
586 var
= make_ssa_name (SSA_NAME_VAR (m_acc
), stmt
);
587 TREE_OPERAND (stmt
, 0) = var
;
588 bsi_insert_after (&bsi
, stmt
, BSI_NEW_STMT
);
594 for (phi
= phi_nodes (back
->dest
); phi
; phi
= PHI_CHAIN (phi
))
595 if (PHI_RESULT (phi
) == a_acc
)
598 add_phi_arg (phi
, a_acc_arg
, back
);
603 for (phi
= phi_nodes (back
->dest
); phi
; phi
= PHI_CHAIN (phi
))
604 if (PHI_RESULT (phi
) == m_acc
)
607 add_phi_arg (phi
, m_acc_arg
, back
);
611 /* Adjust value of the return at the end of BB according to M and A
615 adjust_return_value (basic_block bb
, tree m
, tree a
)
617 tree ret_stmt
= last_stmt (bb
), ret_var
, var
, stmt
, tmp
;
618 tree ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
619 block_stmt_iterator bsi
= bsi_last (bb
);
621 gcc_assert (TREE_CODE (ret_stmt
) == RETURN_EXPR
);
623 ret_var
= TREE_OPERAND (ret_stmt
, 0);
627 if (TREE_CODE (ret_var
) == MODIFY_EXPR
)
629 ret_var
->common
.ann
= (tree_ann_t
) stmt_ann (ret_stmt
);
630 bsi_replace (&bsi
, ret_var
, true);
631 SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var
, 0)) = ret_var
;
632 ret_var
= TREE_OPERAND (ret_var
, 0);
633 ret_stmt
= build1 (RETURN_EXPR
, TREE_TYPE (ret_stmt
), ret_var
);
634 bsi_insert_after (&bsi
, ret_stmt
, BSI_NEW_STMT
);
639 stmt
= build2 (MODIFY_EXPR
, ret_type
, NULL_TREE
,
640 build2 (MULT_EXPR
, ret_type
, m_acc
, ret_var
));
642 tmp
= create_tmp_var (ret_type
, "acc_tmp");
643 add_referenced_var (tmp
);
645 var
= make_ssa_name (tmp
, stmt
);
646 TREE_OPERAND (stmt
, 0) = var
;
647 bsi_insert_before (&bsi
, stmt
, BSI_SAME_STMT
);
654 stmt
= build2 (MODIFY_EXPR
, ret_type
, NULL_TREE
,
655 build2 (PLUS_EXPR
, ret_type
, a_acc
, var
));
657 tmp
= create_tmp_var (ret_type
, "acc_tmp");
658 add_referenced_var (tmp
);
660 var
= make_ssa_name (tmp
, stmt
);
661 TREE_OPERAND (stmt
, 0) = var
;
662 bsi_insert_before (&bsi
, stmt
, BSI_SAME_STMT
);
665 TREE_OPERAND (ret_stmt
, 0) = var
;
666 update_stmt (ret_stmt
);
669 /* Subtract COUNT and FREQUENCY from the basic block and it's
672 decrease_profile (basic_block bb
, gcov_type count
, int frequency
)
678 bb
->frequency
-= frequency
;
679 if (bb
->frequency
< 0)
681 if (!single_succ_p (bb
))
683 gcc_assert (!EDGE_COUNT (bb
->succs
));
686 e
= single_succ_edge (bb
);
692 /* Returns true if argument PARAM of the tail recursive call needs to be copied
693 when the call is eliminated. */
696 arg_needs_copy_p (tree param
)
700 if (!is_gimple_reg (param
) || !var_ann (param
))
703 /* Parameters that are only defined but never used need not be copied. */
704 def
= default_def (param
);
711 /* Eliminates tail call described by T. TMP_VARS is a list of
712 temporary variables used to copy the function arguments. */
715 eliminate_tail_call (struct tailcall
*t
)
717 tree param
, stmt
, args
, rslt
, call
;
718 basic_block bb
, first
;
721 block_stmt_iterator bsi
;
724 stmt
= orig_stmt
= bsi_stmt (t
->call_bsi
);
727 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
729 fprintf (dump_file
, "Eliminated tail recursion in bb %d : ",
731 print_generic_stmt (dump_file
, stmt
, TDF_SLIM
);
732 fprintf (dump_file
, "\n");
735 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
736 stmt
= TREE_OPERAND (stmt
, 1);
738 first
= single_succ (ENTRY_BLOCK_PTR
);
740 /* Remove the code after call_bsi that will become unreachable. The
741 possibly unreachable code in other blocks is removed later in
745 while (!bsi_end_p (bsi
))
747 tree t
= bsi_stmt (bsi
);
748 /* Do not remove the return statement, so that redirect_edge_and_branch
749 sees how the block ends. */
750 if (TREE_CODE (t
) == RETURN_EXPR
)
753 bsi_remove (&bsi
, true);
757 /* Number of executions of function has reduced by the tailcall. */
758 e
= single_succ_edge (t
->call_block
);
759 decrease_profile (EXIT_BLOCK_PTR
, e
->count
, EDGE_FREQUENCY (e
));
760 decrease_profile (ENTRY_BLOCK_PTR
, e
->count
, EDGE_FREQUENCY (e
));
761 if (e
->dest
!= EXIT_BLOCK_PTR
)
762 decrease_profile (e
->dest
, e
->count
, EDGE_FREQUENCY (e
));
764 /* Replace the call by a jump to the start of function. */
765 e
= redirect_edge_and_branch (single_succ_edge (t
->call_block
), first
);
767 PENDING_STMT (e
) = NULL_TREE
;
769 /* Add phi node entries for arguments. The ordering of the phi nodes should
770 be the same as the ordering of the arguments. */
771 for (param
= DECL_ARGUMENTS (current_function_decl
),
772 args
= TREE_OPERAND (stmt
, 1),
773 phi
= phi_nodes (first
);
775 param
= TREE_CHAIN (param
),
776 args
= TREE_CHAIN (args
))
778 if (!arg_needs_copy_p (param
))
780 gcc_assert (param
== SSA_NAME_VAR (PHI_RESULT (phi
)));
782 add_phi_arg (phi
, TREE_VALUE (args
), e
);
783 phi
= PHI_CHAIN (phi
);
786 /* Update the values of accumulators. */
787 adjust_accumulator_values (t
->call_bsi
, t
->mult
, t
->add
, e
);
789 call
= bsi_stmt (t
->call_bsi
);
790 if (TREE_CODE (call
) == MODIFY_EXPR
)
792 rslt
= TREE_OPERAND (call
, 0);
794 /* Result of the call will no longer be defined. So adjust the
795 SSA_NAME_DEF_STMT accordingly. */
796 SSA_NAME_DEF_STMT (rslt
) = build_empty_stmt ();
799 bsi_remove (&t
->call_bsi
, true);
803 /* Add phi nodes for the virtual operands defined in the function to the
804 header of the loop created by tail recursion elimination.
806 Originally, we used to add phi nodes only for call clobbered variables,
807 as the value of the non-call clobbered ones obviously cannot be used
808 or changed within the recursive call. However, the local variables
809 from multiple calls now share the same location, so the virtual ssa form
810 requires us to say that the location dies on further iterations of the loop,
811 which requires adding phi nodes.
814 add_virtual_phis (void)
816 referenced_var_iterator rvi
;
819 /* The problematic part is that there is no way how to know what
820 to put into phi nodes (there in fact does not have to be such
821 ssa name available). A solution would be to have an artificial
822 use/kill for all virtual operands in EXIT node. Unless we have
823 this, we cannot do much better than to rebuild the ssa form for
824 possibly affected virtual ssa names from scratch. */
826 FOR_EACH_REFERENCED_VAR (var
, rvi
)
828 if (!is_gimple_reg (var
) && default_def (var
) != NULL_TREE
)
829 mark_sym_for_renaming (var
);
832 update_ssa (TODO_update_ssa_only_virtuals
);
835 /* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also
836 mark the tailcalls for the sibcall optimization. */
839 optimize_tail_call (struct tailcall
*t
, bool opt_tailcalls
)
841 if (t
->tail_recursion
)
843 eliminate_tail_call (t
);
849 tree stmt
= bsi_stmt (t
->call_bsi
);
851 stmt
= get_call_expr_in (stmt
);
852 CALL_EXPR_TAILCALL (stmt
) = 1;
853 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
855 fprintf (dump_file
, "Found tail call ");
856 print_generic_expr (dump_file
, stmt
, dump_flags
);
857 fprintf (dump_file
, " in bb %i\n", t
->call_block
->index
);
864 /* Optimizes tail calls in the function, turning the tail recursion
868 tree_optimize_tail_calls_1 (bool opt_tailcalls
)
871 bool phis_constructed
= false;
872 struct tailcall
*tailcalls
= NULL
, *act
, *next
;
873 bool changed
= false;
874 basic_block first
= single_succ (ENTRY_BLOCK_PTR
);
875 tree stmt
, param
, ret_type
, tmp
, phi
;
878 if (!suitable_for_tail_opt_p ())
881 opt_tailcalls
= suitable_for_tail_call_opt_p ();
883 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
885 /* Only traverse the normal exits, i.e. those that end with return
887 stmt
= last_stmt (e
->src
);
890 && TREE_CODE (stmt
) == RETURN_EXPR
)
891 find_tail_calls (e
->src
, &tailcalls
);
894 /* Construct the phi nodes and accumulators if necessary. */
895 a_acc
= m_acc
= NULL_TREE
;
896 for (act
= tailcalls
; act
; act
= act
->next
)
898 if (!act
->tail_recursion
)
901 if (!phis_constructed
)
903 /* Ensure that there is only one predecessor of the block. */
904 if (!single_pred_p (first
))
905 first
= split_edge (single_succ_edge (ENTRY_BLOCK_PTR
));
907 /* Copy the args if needed. */
908 for (param
= DECL_ARGUMENTS (current_function_decl
);
910 param
= TREE_CHAIN (param
))
911 if (arg_needs_copy_p (param
))
913 tree name
= default_def (param
);
914 tree new_name
= make_ssa_name (param
, SSA_NAME_DEF_STMT (name
));
917 set_default_def (param
, new_name
);
918 phi
= create_phi_node (name
, first
);
919 SSA_NAME_DEF_STMT (name
) = phi
;
920 add_phi_arg (phi
, new_name
, single_pred_edge (first
));
922 phis_constructed
= true;
925 if (act
->add
&& !a_acc
)
927 ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
929 tmp
= create_tmp_var (ret_type
, "add_acc");
930 add_referenced_var (tmp
);
932 phi
= create_phi_node (tmp
, first
);
934 /* RET_TYPE can be a float when -ffast-maths is
936 fold_convert (ret_type
, integer_zero_node
),
937 single_pred_edge (first
));
938 a_acc
= PHI_RESULT (phi
);
941 if (act
->mult
&& !m_acc
)
943 ret_type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
945 tmp
= create_tmp_var (ret_type
, "mult_acc");
946 add_referenced_var (tmp
);
948 phi
= create_phi_node (tmp
, first
);
950 /* RET_TYPE can be a float when -ffast-maths is
952 fold_convert (ret_type
, integer_one_node
),
953 single_pred_edge (first
));
954 m_acc
= PHI_RESULT (phi
);
959 if (phis_constructed
)
961 /* Reverse the order of the phi nodes, so that it matches the order
962 of operands of the function, as assumed by eliminate_tail_call. */
963 set_phi_nodes (first
, phi_reverse (phi_nodes (first
)));
966 for (; tailcalls
; tailcalls
= next
)
968 next
= tailcalls
->next
;
969 changed
|= optimize_tail_call (tailcalls
, opt_tailcalls
);
975 /* Modify the remaining return statements. */
976 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
978 stmt
= last_stmt (e
->src
);
981 && TREE_CODE (stmt
) == RETURN_EXPR
)
982 adjust_return_value (e
->src
, m_acc
, a_acc
);
988 free_dominance_info (CDI_DOMINATORS
);
992 if (phis_constructed
)
997 execute_tail_recursion (void)
999 tree_optimize_tail_calls_1 (false);
1004 gate_tail_calls (void)
1006 return flag_optimize_sibling_calls
!= 0;
1010 execute_tail_calls (void)
1012 tree_optimize_tail_calls_1 (true);
1016 struct tree_opt_pass pass_tail_recursion
=
1019 gate_tail_calls
, /* gate */
1020 execute_tail_recursion
, /* execute */
1023 0, /* static_pass_number */
1025 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
1026 0, /* properties_provided */
1027 0, /* properties_destroyed */
1028 0, /* todo_flags_start */
1029 TODO_dump_func
| TODO_verify_ssa
, /* todo_flags_finish */
1033 struct tree_opt_pass pass_tail_calls
=
1036 gate_tail_calls
, /* gate */
1037 execute_tail_calls
, /* execute */
1040 0, /* static_pass_number */
1042 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
1043 0, /* properties_provided */
1044 0, /* properties_destroyed */
1045 0, /* todo_flags_start */
1046 TODO_dump_func
| TODO_verify_ssa
, /* todo_flags_finish */