/* Tail call optimization on trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "ipa-utils.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach the return x statement, we should return
   a_acc + x * m_acc instead.  They are initially set to 0 and 1,
   respectively, so the semantics of the function are obviously preserved.
   If we are guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
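/* As a further illustration of case 3, consider a function such as

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   The recursive return matches return a + m * f (...) with a = 0 and m = n,
   so after the transformation the function behaves as if it were written as

   int fact (int n)
   {
     int m_acc = 1;

     while (n >= 2)
       {
         m_acc = m_acc * n;
         n = n - 1;
       }

     return m_acc * 1;
   }

   where the final return computes m_acc * x + a_acc with x = 1 and a_acc
   omitted because it never changes from 0.  This is only a sketch; the pass
   works on GIMPLE and keeps the accumulators in SSA form.  */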
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
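/* For instance, if the call's return value is held in x_1 (the SSA names
   here are purely illustrative), the assignment x_2 = x_1 + c_3 with c_3
   independent of the call yields *a = c_3, while x_2 = x_1 * c_3 yields
   *m = c_3; in both cases *ass_var is updated to x_2.  */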
static bool
process_assignment (gassign *stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);
  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt))
        {
          if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
            return false;

          /* Even if the type modes are the same, if the precision of the
             type is smaller than mode's precision,
             reduce_to_bit_field_precision would generate additional code.  */
          if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
              && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
                  > TYPE_PRECISION (TREE_TYPE (dest))))
            return false;
        }

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }
  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;
    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
        return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */
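/* For instance, given a block that ends in

     x_1 = foo (a_2);
     return x_1;

   (purely illustrative GIMPLE), the call to foo is recorded as a candidate
   tailcall, and if foo is the current function the tail_recursion flag of
   the new entry is set as well.  */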
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = as_a <gcall *> (stmt);
          ass_var = gimple_call_lhs (call);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }
  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in
     (e.g.) "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;
      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }
  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }
  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);
      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;
      /* This is a gimple assign.  */
      if (! process_assignment (as_a <gassign *> (stmt), gsi, &tmp_m,
                                &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;
  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement in the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
        tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
        tem = fold_build2 (code, TREE_TYPE (op1),
                           fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
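/* In terms of case 3 of the comment at the top of the file, the updates
   performed here amount to

     a_acc <- a_acc + a * m_acc
     m_acc <- m_acc * m

   emitted just before the jump back to the start of the function.  */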
static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* Number of executions of function has reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
                    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }
  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with single predecessor with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple stmt;
  edge_iterator ei;
  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }
  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that there is only one predecessor of the block
             and that no degenerate PHI nodes are already present.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first =
              split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = ssa_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gphi *phi;

                set_ssa_default_def (cfun, param, new_name);
                phi = create_phi_node (name, first);
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }
  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }
  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }
  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
    {
      return tree_optimize_tail_calls_1 (false);
    }

}; // class pass_tail_recursion
} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}
namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls
} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}