/* Tail call optimization on trees.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "gimple-pretty-print.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "plugin-api.h"
#include "ipa-utils.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that, when we reach the return x statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
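
/* As a worked instance of case 3 (the concrete constants are made up for
   illustration): suppose the current accumulator values are a_acc = 5 and
   m_acc = 3, and we see "return 1 + 2 * f (...)", i.e. a = 1 and m = 2.
   The identity above gives

     a_acc + (1 + 2 * f (...)) * m_acc = (5 + 1 * 3) + (2 * 3) * f (...)
                                       = 8 + 6 * f (...)

   so we set a_acc to 8, m_acc to 6, and replace the call by a jump to the
   start of the function.  */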
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */

static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */
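
/* For example (names invented for illustration), if EXPR is x_1, AT is a
   statement near the return, and x_1 is defined by

     x_1 = PHI <7 (edge on the marked chain), y_2 (elsewhere)>

   in a block of the chain, the phi argument on the chain edge is followed;
   since it is the constant 7, the value is known at GSI and 7 is returned.
   Had the chain argument instead been defined between GSI and AT, NULL_TREE
   would be returned.  */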
static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
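
/* For instance, given a hypothetical GIMPLE sequence after a recursive call
   (names invented for illustration)

     x_1 = foo (n_2);     <-- the call; *ass_var is x_1 on entry
     a_3 = x_1 * 3;       <-- process_assignment sets *m = 3
     b_4 = a_3 + 1;       <-- process_assignment sets *a = 1

   the statements as a whole compute 3 * foo (n_2) + 1, and *ass_var is
   advanced to the destination of each assignment in turn.  */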
static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return false;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
		  > TYPE_PRECISION (TREE_TYPE (dest))))
	    return false;
	}

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  /* Here dest = non_ass_var - src, i.e. -1 * src + non_ass_var.  */
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = non_ass_var;
	}

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
/* Propagate VAR through phis on edge E.  */
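
/* For example (SSA names invented for illustration), if the destination
   block of E starts with

     x_2 = PHI <var_1 (E), y_3 (other edge)>

   then propagate_through_phis (var_1, E) returns x_2; if no phi on the
   destination uses VAR on E, VAR itself is returned unchanged.  */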
static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */
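
/* For illustration (the functions named here are hypothetical): in

     int g (int n) { return f (n); }

   the call to f is found as a plain (non-recursive) tail call, while
   "return n + sum (n - 1)" from the example in the header comment is found
   as a tail recursion with tail_recursion set and an additive operand
   a = n.  */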
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  tree arg;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1 into a new variable with name LABEL, and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */
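
/* E.g. adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval, gsi)
   emits, before GSI (SSA names invented for illustration),

     acc_tmp_3 = a_acc_1 + retval_2;

   and returns acc_tmp_3.  */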
static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gimple stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign_with_ops (code, result, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1, and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var = copy_ssa_name (acc, NULL);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, var, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
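
/* For instance, for accumulators a_acc and m_acc and factors A and M this
   emits (SSA names invented for illustration)

     acc_tmp_5 = m_acc_1 * A;
     a_acc_6 = a_acc_1 + acc_tmp_5;
     m_acc_7 = m_acc_1 * M;

   and feeds a_acc_6 and m_acc_7 back into the accumulator phis through
   edge BACK.  */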
static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
/* Adjust the value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */

static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;

  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T.  */
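
/* Illustration (block numbers and SSA names invented): for a tail-recursive
   "x_1 = sum (n_2); return x_1;" in bb 4, the statements after the call are
   removed, the successor edge of bb 4 is redirected to the loop header
   created at the start of the function, and n_2 becomes the argument of the
   corresponding parameter phi on that new back edge.  */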
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     CFG cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* Number of executions of function has reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used in creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with single predecessor, with an initial value
   INIT converted to the current function return type.  */
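
/* E.g. create_tailcall_accumulator ("add_acc", first, integer_zero_node)
   builds a phi like (SSA names invented for illustration)

     add_acc_4 = PHI <0 (entry edge)>

   in FIRST; the back-edge argument is added later by
   adjust_accumulator_values via add_successor_phi_arg.  */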
static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gimple phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that there is only one predecessor of the block
	     and that there are no existing degenerate PHI nodes;
	     otherwise split the edge from the entry block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gimple phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    /* When the tail call elimination using accumulators is performed,
       statements adding the accumulated value are inserted at all exits.
       This turns all other tail calls into non-tail ones.  */
    opt_tailcalls = false;

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
    {
      return tree_optimize_tail_calls_1 (false);
    }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}
namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}