/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that, when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit the accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and a jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
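
/* As an illustrative sketch (this example is not part of the original
   comment), consider case 3 applied to a simple factorial:

     int fact (int n)
     {
       if (n < 2)
         return 1;
       return n * fact (n - 1);
     }

   The recursive return matches a + m * f (...) with a = 0 and m = n, so on
   each eliminated call we leave a_acc unchanged (a_acc += 0 * m_acc) and
   multiply m_acc by n, then jump back to the start with n - 1.  When the
   base case "return 1" is reached, case 1 rewrites it to
   return m_acc * 1 + a_acc, i.e. the product accumulated so far.  */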
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
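
/* As an illustrative example (not from the original sources), a function

     int g (int n)
     {
       char *buf = alloca (n);
       return h (buf, n);
     }

   fails this predicate because cfun->calls_alloca is set, so the call to h
   is not marked as a sibcall.  Tail recursion elimination proper is still
   allowed, since only the sibcall marking is gated on this test.  The same
   applies to functions that call setjmp or take the address of one of their
   parameters.  */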
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt)
	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	return false;

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  if (rhs_class != GIMPLE_BINARY_RHS)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

     x = call ();
     y = m * x;
     z = y + a;
     return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
	 POINTER_PLUS_EXPR).  */

    default:
      return false;
    }
}
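
/* For instance (an illustrative sketch, not from the original sources),
   given the call "t_1 = f (n_2)" followed by the GIMPLE assignments

     y_3 = t_1 * 4;     process_assignment sets *m = 4 and *ass_var = y_3
     z_4 = y_3 + n_2;   process_assignment sets *a = n_2 and *ass_var = z_4

   provided independent_of_stmt_p can show that 4 and n_2 are already known
   at the call site.  find_tail_calls then combines these factors into the
   final mult and add of the tailcall record.  */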
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
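
/* For example (an illustrative sketch, not from the original sources), if
   the destination of E starts with

     # sum_3 = PHI <sum_1 (E), sum_2 (other edge)>

   then propagate_through_phis (sum_1, E) returns sum_3, the name under which
   the value of VAR is known once the edge has been crossed; if no PHI uses
   VAR on E, VAR itself is returned.  */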
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;
  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels and debug statements.  */
      if (gimple_code (stmt) == GIMPLE_LABEL || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = TREE_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }
  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  if (a)
	    a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  if (m)
	    m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
	}
    }
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
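
/* As a hypothetical illustration (not from the original sources), in
   GIMPLE-like pseudo code

     int f (int n)
     {
       if (n == 0)
         return 0;
       t_1 = f (n - 1);
       u_2 = t_1 + n;
       return u_2;
     }

   find_tail_calls records the call to f as a tail-recursive tailcall with
   add = n and mult = NULL_TREE: process_assignment sees u_2 = t_1 + n, so
   the returned value is f (n - 1) + n, which matches case 3 of the overview
   with a = n and m = 1.  */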
/* Helper to insert PHI_ARG into the PHI of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement in the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
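
/* For example (an illustrative sketch, not from the original sources),
   adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc, x, gsi) emits
   a statement equivalent to

     acc_tmp_1 = m_acc_2 * x_3;

   before GSI and returns the new SSA name acc_tmp_1, wrapping the operands
   in conversions when the accumulator and X have different types.  */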
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);

  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
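
/* For instance (an illustrative sketch, not from the original sources),
   when eliminating the recursive call in "return 3 + 2 * f (...)", we have
   a = 3 and m = 2, so the code above emits (up to type conversions)

     acc_tmp_1 = m_acc_2 * 3;
     a_acc_3 = a_acc_4 + acc_tmp_1;
     m_acc_5 = m_acc_2 * 2;

   which matches the identity from the overview:
   a_acc' = a_acc + a * m_acc and m_acc' = m_acc * m.  */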
/* Adjust the value of the return statement at the end of BB according to the
   M and A accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates tail call described by T.  TMP_VARS is a list of
   temporary variables used to copy the function arguments.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);
  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* Number of executions of function has reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = TREE_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
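
/* Schematically (an illustrative sketch, not taken from the original
   sources), a tail-recursive block such as

     a_5 = f (x_3, y_4);
     return a_5;

   loses the call, its exit edge is redirected to the block that follows the
   entry block, and x_3/y_4 are added as PHI arguments for the copied
   parameters on that new back edge, so the recursion becomes a loop.  */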
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the loop,
   which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into phi nodes (there in fact does not have to be such
     an ssa name available).  A solution would be to have an artificial
     use/kill for all virtual operands in EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
	mark_sym_for_renaming (var);
    }
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with single predecessor with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
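
/* For example (an illustrative sketch, not from the original sources), when
   called as create_tailcall_accumulator ("add_acc", first, init) with a zero
   INIT, this builds

     # add_acc_1 = PHI <0 (single predecessor edge)>

   at the start of FIRST; the argument for the back edge introduced by tail
   recursion elimination is added later via add_successor_phi_arg from
   adjust_accumulator_values.  */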
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }
  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that there is only one predecessor of the block
	     or if there are existing degenerate PHI nodes.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = TREE_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = gimple_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_default_def (param, new_name);
		phi = create_phi_node (name, first);
		SSA_NAME_DEF_STMT (name) = phi;
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }
  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_recursion,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};
struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_calls,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};