/* Tail call optimization on trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "plugin-api.h"
#include "ipa-utils.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }
   To do this, we maintain two accumulators (a_acc and m_acc), so that
   when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):
   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
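
/* Illustrative sketch added for exposition (not from the original sources):
   assuming the accumulator scheme above, a case-3 function such as

   int fact (int n)
   {
     if (n == 0)
       return 1;
     return n * fact (n - 1);
   }

   has a = 0 and m = n at its recursive return, and is effectively turned
   into

   int fact (int n)
   {
     int m_acc = 1;

     while (n != 0)
       {
         m_acc *= n;
         n--;
       }

     return 1 * m_acc;
   }

   where the final return computes m_acc * x + a_acc with x == 1 and
   a_acc == 0.  */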
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

suitable_for_tail_opt_p (void)

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

suitable_for_tail_call_opt_p (void)
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)

  basic_block bb, call_bb, at_bb;

  if (is_gimple_min_invariant (expr))

  if (TREE_CODE (expr) != SSA_NAME)

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))

  at = SSA_NAME_DEF_STMT (expr);

  /* The default definition or defined before the chain.  */

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    if (gsi_stmt (gsi) == at)

  if (!gsi_end_p (gsi))

  if (gimple_code (at) != GIMPLE_PHI)

  FOR_EACH_EDGE (e, ei, bb->preds)

  expr = PHI_ARG_DEF_FROM_EDGE (at, e);
  if (TREE_CODE (expr) != SSA_NAME)
    /* The value is a constant.  */

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
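
/* Worked example added for exposition (not from the original sources; the
   names t1_, t2_, t3_, foo and n_1 are hypothetical): for a return sequence
   such as

     t1_ = foo (n_1);
     t2_ = t1_ * 3;
     t3_ = t2_ + 5;
     return t3_;

   the two assignments are processed in turn and accumulate M = 3 and A = 5,
   i.e. the caller's value is 3 * foo (n_1) + 5, which is case 3 of the
   scheme described at the top of this file.  */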
process_assignment (gassign *stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)

  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt))

          if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))

          /* Even if the type modes are the same, if the precision of the
             type is smaller than mode's precision,
             reduce_to_bit_field_precision would generate additional code.  */
          if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
              && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
                  > TYPE_PRECISION (TREE_TYPE (dest))))

      if (src_var != *ass_var)

    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))

  if (rhs_class == GIMPLE_UNARY_RHS)

  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))

  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))

    case POINTER_PLUS_EXPR:

      *m = build_minus_one_cst (TREE_TYPE (op0));

      *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);

      *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
      *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);

      /* TODO -- Handle POINTER_PLUS_EXPR.  */
/* Propagate VAR through phis on edge E.  */

propagate_through_phis (tree var, edge e)

  basic_block dest = e->dest;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))

      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */
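
/* Illustrative note added for exposition (not from the original sources; g
   and f are hypothetical): in a function like

     int g (int n)
     {
       int t = f (n);
       return t;
     }

   the only statement between the call and the return is the copy of the
   call's result, so the call to f is recorded as a tail call.  If the call
   referenced local variables, or if a later statement did more than combine
   the return value with values independent of the call, the candidate would
   be rejected by the checks below.  */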
find_tail_calls (basic_block bb, struct tailcall **ret)

  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple_stmt_iterator gsi, agsi;

  if (!single_succ_p (bb))

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))

      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))

      /* Check for a call.  */
      if (is_gimple_call (stmt))

          call = as_a <gcall *> (stmt);
          ass_var = gimple_call_lhs (call);

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))

  /* Recurse to the predecessors.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    find_tail_calls (e->src, ret);

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)

          arg = gimple_call_arg (call, idx);
          /* Make sure there are no problems with copying.  The parameter
             must have a copyable type and the two arguments must have
             reasonably equivalent types.  The latter requirement could be
             relaxed if we emitted a suitable type conversion statement.  */
          if (!is_gimple_reg_type (TREE_TYPE (param))
              || !useless_type_conversion_p (TREE_TYPE (param),
          /* The parameter should be a real operand, so that the phi node
             created for it at the start of the function has the meaning
             of copying the value.  This test implies is_gimple_reg_type
             from the previous condition, however this one could be
             relaxed by being more careful with copying the new value
             of the parameter (emitting appropriate GIMPLE_ASSIGN and
             updating the virtual operands).  */
          if (!is_gimple_reg (param))

      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)

      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;

      while (gsi_end_p (agsi))

          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)

      if (gimple_code (stmt) == GIMPLE_RETURN)

      if (gimple_clobber_p (stmt))

      if (is_gimple_debug (stmt))

      if (gimple_code (stmt) != GIMPLE_ASSIGN)

      /* This is a gimple assign.  */
      if (! process_assignment (as_a <gassign *> (stmt), gsi, &tmp_m,

          tree type = TREE_TYPE (tmp_a);

          a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);

          tree type = TREE_TYPE (tmp_m);

          m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);

          a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))

  nw = XNEW (struct tailcall);

  nw->tail_recursion = tail_recursion;
/* Helper to insert PHI_ARG into the phi of VAR in the destination of edge E.  */

add_successor_phi_arg (edge e, tree var, tree phi_arg)

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
/* Creates a GIMPLE statement which performs the operation specified by
   CODE on ACC and OP1, assigning the result to a new variable named LABEL,
   and inserts the statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)

  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);

  if (POINTER_TYPE_P (ret_type))

      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);

      if (code == POINTER_PLUS_EXPR)
        tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);

        tem = fold_build2 (code, TREE_TYPE (op1),
                           fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)

  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);

      tree rhs = fold_convert (TREE_TYPE (acc),

                               fold_convert (TREE_TYPE (op1), acc),
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);

  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
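
/* Illustrative note added for exposition (not from the original sources):
   for a tail call of the form return a + m * f (...), this routine emits,
   roughly,

     acc_tmp   = m_acc * a;
     a_acc_arg = a_acc + acc_tmp;
     m_acc_arg = m_acc * m;

   before the jump back to the start of the function, and feeds A_ACC_ARG and
   M_ACC_ARG into the accumulator phi nodes on the back edge, matching the
   update a_acc += a * m_acc and m_acc *= m described at the top of this
   file.  */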
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)

  tree var, a_acc_arg, m_acc_arg;

  m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

      if (integer_onep (a))

        var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);

    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

    add_successor_phi_arg (back, a_acc, a_acc_arg);

    add_successor_phi_arg (back, m_acc, m_acc_arg);
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */
adjust_return_value (basic_block bb, tree m, tree a)

  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)

    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,

    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,

  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
decrease_profile (basic_block bb, gcov_type count, int frequency)

  bb->frequency -= frequency;
  if (bb->frequency < 0)

  if (!single_succ_p (bb))

      gcc_assert (!EDGE_COUNT (bb->succs));

  e = single_succ_edge (bb);
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

arg_needs_copy_p (tree param)

  if (!is_gimple_reg (param))

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
/* Eliminates the tail call described by T.  */
eliminate_tail_call (struct tailcall *t)

  basic_block bb, first;
  gimple_stmt_iterator gsi;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))

      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  while (!gsi_end_p (gsi))

      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)

      gsi_remove (&gsi, true);
  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
      if (!arg_needs_copy_p (param))

      arg = gimple_call_arg (stmt, idx);

      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)

      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();

  gsi_remove (&t->call_gsi, true);
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   marks the call for the sibcall optimization.  */
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)

  if (t->tail_recursion)

      eliminate_tail_call (t);

      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))

          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of basic block BB, which has a single predecessor, with an initial
   value of INIT converted to the current function's return type.  */
create_tailcall_accumulator (const char *label, basic_block bb, tree init)

  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
  return PHI_RESULT (phi);
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */
tree_optimize_tail_calls_1 (bool opt_tailcalls)

  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (!suitable_for_tail_opt_p ())

    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)

      if (!act->tail_recursion)

      if (!phis_constructed)
          /* Ensure that the block has a single predecessor, splitting the
             entry edge if it has several predecessors or pre-existing
             (degenerate) PHI nodes.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))

                tree name = ssa_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));

                set_ssa_default_def (cfun, param, new_name);
                phi = create_phi_node (name, first);
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));

          phis_constructed = true;
      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
  /* When the tail call elimination using accumulators is performed,
     statements adding the accumulated value are inserted at all exits.
     This turns all other tail calls into non-tail ones.  */
  opt_tailcalls = false;

  for (; tailcalls; tailcalls = next)

      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
  /* Modify the remaining return statements.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)

      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        adjust_return_value (e->src, m_acc, a_acc);
  /* We may have created new loops.  Make them magically appear.  */
  loops_state_set (LOOPS_NEED_FIXUP);
  free_dominance_info (CDI_DOMINATORS);

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
gate_tail_calls (void)

  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);

execute_tail_calls (void)

  return tree_optimize_tail_calls_1 (true);
const pass_data pass_data_tail_recursion =

  GIMPLE_PASS, /* type */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
class pass_tail_recursion : public gimple_opt_pass

  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)

      return tree_optimize_tail_calls_1 (false);

}; // class pass_tail_recursion
make_pass_tail_recursion (gcc::context *ctxt)

  return new pass_tail_recursion (ctxt);
const pass_data pass_data_tail_calls =

  GIMPLE_PASS, /* type */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
class pass_tail_calls : public gimple_opt_pass

  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls
make_pass_tail_calls (gcc::context *ctxt)

  return new pass_tail_calls (ctxt);