/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "common/common-target.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level,
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that, when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the semantics
   of the function are preserved.  If we can guarantee that the value of an
   accumulator never changes, we omit it.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and a jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  The special cases where the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
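
/* As an illustrative sketch of case 3 (the function and the concrete names
   here are hypothetical, chosen only for the example), consider

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   Each recursive return has a = 0 and m = n, so for every eliminated call we
   leave a_acc unchanged (a_acc += 0 * m_acc) and update m_acc *= n.  The
   final "return 1" is then rewritten per case 1 into return m_acc * 1 + a_acc,
   which yields the product n * (n - 1) * ... * 2 * 1.  */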
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of the multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stack_realign_needed)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
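
/* For example (an illustrative sketch with hypothetical SSA names), in the
   block

     a_1 = ...;
     t_2 = foo (n_3);    <-- the call at GSI
     r_4 = a_1 + t_2;    <-- AT

   independent_of_stmt_p (a_1, AT, GSI) returns a_1, because a_1 is defined
   before the call.  If a_1 were instead defined by a statement between the
   call and AT, the result would be NULL_TREE and the tail call would be
   rejected.  */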
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt)
	  && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	return false;

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	*m = build_real (TREE_TYPE (op0), dconstm1);
      else
	*m = build_int_cst (TREE_TYPE (op0), -1);

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	/* dest = f - non_ass_var, i.e. the addend is -non_ass_var.  */
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  /* dest = non_ass_var - f, i.e. m = -1 and a = non_ass_var.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
	    *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
	  else
	    *m = build_int_cst (TREE_TYPE (non_ass_var), -1);

	  *a = non_ass_var;
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
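
/* As an illustrative sketch (hypothetical SSA names), for the sequence

     t_1 = foo (...);
     t_2 = t_1 * c_3;
     t_4 = t_2 + d_5;
     return t_4;

   find_tail_calls feeds the two assignments through process_assignment,
   which tracks *ass_var from t_1 through t_2 to t_4 and yields the
   per-statement factors that combine into m = c_3 and a = d_5, i.e. the
   returned value is c_3 * foo (...) + d_5.  */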
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
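
/* E.g. if the destination of E contains the phi node "x_3 = PHI <x_1 (E),
   x_2 (other)>" (hypothetical names), then propagate_through_phis (x_1, E)
   returns x_3; a VAR with no phi argument on E is returned unchanged.  */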
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
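
/* For the sum example at the top of this file, the relevant block is,
   sketched with hypothetical SSA names,

     t_5 = n_1 - 1;
     t_6 = sum (t_5);
     t_7 = n_1 + t_6;
     return t_7;

   and find_tail_calls records a tailcall with tail_recursion = true,
   mult = NULL_TREE and add = n_1.  */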
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 into a new variable named after LABEL, and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
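
/* For instance, adjust_return_value below uses this as
   adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval, gsi),
   which emits a statement computing m_acc * retval into a fresh SSA name
   based on a "mul_tmp" temporary and returns that name.  */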
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
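
/* Continuing the sketch from the head of this file: for a tail call with
   add = a and mult = m, this emits (up to type conversions) the updates
   a_acc' = a_acc + a * m_acc and m_acc' = m_acc * m at the point of the
   eliminated call, and routes the new values into the loop-header phis
   via BACK.  */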
/* Adjust the value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the loop,
   which requires adding phi nodes.
*/

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into the phi nodes (in fact such an ssa name does not have
     to be available).  A solution would be to have an artificial
     use/kill for all virtual operands in the EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
	mark_sym_for_renaming (var);
    }
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Split the entry edge if the entry block has more than one
	     predecessor or already contains (degenerate) PHI nodes.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = gimple_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_default_def (param, new_name);
		phi = create_phi_node (name, first);
		SSA_NAME_DEF_STMT (name) = phi;
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_recursion,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",				/* name */
  gate_tail_calls,			/* gate */
  execute_tail_calls,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};