/* Tail call optimization on trees.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "except.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
#include "tree-ssa-live.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze the tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach the return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
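
/* For illustration, consider a hypothetical function (a sketch, not taken
   from the GCC sources) matching case 3 with a = 2 and m = 3:

   int f (int n)
   {
     if (n == 0)
       return 1;
     return 2 + 3 * f (n - 1);
   }

   The transformation described above effectively turns it into

   int f (int n)
   {
     int a_acc = 0, m_acc = 1;
     while (n != 0)
       {
	 a_acc += 2 * m_acc;
	 m_acc *= 3;
	 n = n - 1;
       }
     return a_acc + 1 * m_acc;
   }

   which returns the same values: e.g. f (2) = 2 + 3 * (2 + 3 * 1) = 17,
   and the loop computes a_acc = 2 + 6 = 8, m_acc = 9, and 8 + 9 = 17.  */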
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

/* Bitmap with a bit for each function parameter which is set to true if we
   have to copy the parameter for conversion of tail-recursive calls.  */

static bitmap tailr_arg_needs_copy;

static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* Various targets don't handle tail calls correctly in functions
     that call __builtin_eh_return.  */
  if (cfun->calls_eh_return)
    return false;

  /* ??? It is OK if the address of an argument is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
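
/* An illustrative sketch (use_ptr and h are hypothetical): the function
   below fails the TREE_ADDRESSABLE check above, so its calls are not
   marked for sibcall optimization, although tail recursion elimination
   is still considered:

   int g (int n)
   {
     use_ptr (&n);	// N's address escapes, so N is TREE_ADDRESSABLE
     return h (n);	// cannot be marked as a sibling call here
   }
*/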
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in a sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
		       bitmap to_move)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  if (bitmap_bit_p (to_move, SSA_NAME_VERSION (expr)))
    return expr;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
enum par { FAIL, OK, TRY_MOVE };

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static par
process_assignment (gassign *stmt,
		    gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var, bitmap to_move)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && src_var == *ass_var)
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return FAIL;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && !type_has_mode_precision_p (TREE_TYPE (dest)))
	    return FAIL;
	}

      *ass_var = dest;
      return OK;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return FAIL;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return FAIL;

  if (rhs_class == GIMPLE_UNARY_RHS
      && op0 == *ass_var)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call,
						    to_move)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call,
						    to_move)))
    ;
  else
    return TRY_MOVE;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return FAIL;
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return OK;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return OK;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return OK;

    default:
      return FAIL;
    }
}
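
/* For example (an illustrative sketch, with f the current function and x
   independent of the call), the statements between the call and the return
   compose into the final "return m * f (...) + a" shape tracked by
   find_tail_calls as follows:

     return f (n - 1) + x;	->  m = 1,  a = x	(PLUS_EXPR)
     return x * f (n - 1);	->  m = x,  a = 0	(MULT_EXPR)
     return -f (n - 1);		->  m = -1, a = 0	(NEGATE_EXPR)
     return f (n - 1) - x;	->  m = 1,  a = -x	(MINUS_EXPR, call on LHS)
     return x - f (n - 1);	->  m = -1, a = x	(MINUS_EXPR, call on RHS)

   Note that in the last case process_assignment itself records a = -x
   together with m = -1; find_tail_calls then multiplies the additive
   factor by the multiplicative one, yielding the final a = x.  */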
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }

  return var;
}
/* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
   returns.  Computed lazily, but just once for the function.  */
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple *stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, nops, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = as_a <gcall *> (stmt);
	  ass_var = gimple_call_lhs (call);
	  break;
	}

      /* Allow simple copies between local variables, even if they're
	 aggregates.  */
      if (is_gimple_assign (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), cfun->decl)
	  && auto_var_in_fn_p (gimple_assign_rhs1 (stmt), cfun->decl))
	continue;

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }
  /* If the LHS of our call is not just a simple register or local
     variable, we can't transform this into a tail or sibling call.
     This situation happens, in (e.g.) "*p = foo()" where foo returns a
     struct.  In this case we won't have a temporary here, but we need
     to carry out the side effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var
      && !is_gimple_reg (ass_var)
      && !auto_var_in_fn_p (ass_var, cfun->decl))
    return;

  /* If the call might throw an exception that wouldn't propagate out of
     cfun, we can't transform to a tail or sibling call (PR 82081).  */
  if (stmt_could_throw_p (cfun, stmt)
      && !stmt_can_throw_external (cfun, stmt))
    return;

  /* If the function returns a value, then at present, the tail call
     must return the same type of value.  There is conceptually a copy
     between the object returned by the tail call candidate and the
     object returned by CFUN itself.

     This means that if we have:

	 lhs = f (&<retval>);    // f reads from <retval>
				 // (lhs is usually also <retval>)

     there is a copy between the temporary object returned by f and lhs,
     meaning that any use of <retval> in f occurs before the assignment
     to lhs begins.  Thus the <retval> that is live on entry to the call
     to f is really an independent local variable V that happens to be
     stored in the RESULT_DECL rather than a local VAR_DECL.

     Turning this into a tail call would remove the copy and make the
     lifetimes of the return value and V overlap.  The same applies to
     tail recursion, since if f can read from <retval>, we have to assume
     that CFUN might already have written to <retval> before the call.

     The problem doesn't apply when <retval> is passed by value, but that
     isn't a case we handle anyway.  */
  tree result_decl = DECL_RESULT (cfun->decl);
  if (result_decl
      && may_be_aliased (result_decl)
      && ref_maybe_used_by_stmt_p (call, result_decl))
    return;
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !fndecl_built_in_p (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }
  /* Compute live vars if not computed yet.  */
  if (live_vars == NULL)
    {
      unsigned int cnt = 0;
      FOR_EACH_LOCAL_DECL (cfun, idx, var)
	if (VAR_P (var)
	    && auto_var_in_fn_p (var, cfun->decl)
	    && may_be_aliased (var))
	  {
	    if (live_vars == NULL)
	      live_vars = new live_vars_map;
	    live_vars->put (DECL_UID (var), cnt++);
	  }
      if (live_vars)
	live_vars_vec = compute_live_vars (cfun, live_vars);
    }

  /* Determine a bitmap of variables which are still in scope after the
     call.  */
  bitmap local_live_vars = NULL;
  if (live_vars)
    local_live_vars = live_vars_at_stmt (live_vars_vec, live_vars, call);

  /* Make sure the tail invocation of this function does not indirectly
     refer to local variables.  (Passing variables directly by value
     is OK.)  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && may_be_aliased (var)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	{
	  if (!VAR_P (var))
	    {
	      if (local_live_vars)
		BITMAP_FREE (local_live_vars);
	      return;
	    }
	  else
	    {
	      unsigned int *v = live_vars->get (DECL_UID (var));
	      if (bitmap_bit_p (local_live_vars, *v))
		{
		  BITMAP_FREE (local_live_vars);
		  return;
		}
	    }
	}
    }

  if (local_live_vars)
    BITMAP_FREE (local_live_vars);
  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;
  auto_bitmap to_move_defs;
  auto_vec<gimple *> to_move_stmts;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);
      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      par ret = process_assignment (as_a <gassign *> (stmt), gsi,
				    &tmp_m, &tmp_a, &ass_var, to_move_defs);
      if (ret == FAIL)
	return;
      else if (ret == TRY_MOVE)
	{
	  if (! tail_recursion)
	    return;
	  /* Do not deal with checking dominance, the real fix is to
	     do path isolation for the transform phase anyway, removing
	     the need to compute the accumulators with new stmts.  */
	  if (abb != bb)
	    return;
	  for (unsigned opno = 1; opno < gimple_num_ops (stmt); ++opno)
	    {
	      tree op = gimple_op (stmt, opno);
	      if (independent_of_stmt_p (op, stmt, gsi, to_move_defs) != op)
		return;
	    }
	  bitmap_set_bit (to_move_defs,
			  SSA_NAME_VERSION (gimple_assign_lhs (stmt)));
	  to_move_stmts.safe_push (stmt);
	  continue;
	}

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  /* Move queued defs.  */
  if (tail_recursion)
    {
      unsigned i;
      FOR_EACH_VEC_ELT (to_move_stmts, i, stmt)
	{
	  gimple_stmt_iterator mgsi = gsi_for_stmt (stmt);
	  gsi_move_before (&mgsi, &gsi);
	}
      if (!tailr_arg_needs_copy)
	tailr_arg_needs_copy = BITMAP_ALLOC (NULL);
      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	   param;
	   param = DECL_CHAIN (param), idx++)
	{
	  tree ddef, arg = gimple_call_arg (call, idx);
	  if (is_gimple_reg (param)
	      && (ddef = ssa_default_def (cfun, param))
	      && (arg != ddef))
	    bitmap_set_bit (tailr_arg_needs_copy, idx);
	}
    }

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1 into a new variable named LABEL, and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
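
/* Roughly (SSA names below are illustrative), for a tail-recursive call
   with factors M and A this emits, before the jump back to the start of
   the function,

     acc_tmp    = m_acc * A;		(skipped when A == 1 or !m_acc)
     add_acc_1  = add_acc + acc_tmp;
     mult_acc_1 = mult_acc * M;

   and routes add_acc_1/mult_acc_1 into the accumulator phis over the back
   edge, implementing the a_acc += A * m_acc and m_acc *= M updates from
   the overview comment at the top of the file.  */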
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT from the count of basic block BB and its outgoing edge.  */

static void
decrease_profile (basic_block bb, profile_count count)
{
  bb->count = bb->count - count;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
}
/* Eliminates tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple *stmt, *call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple *orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gimple_stmt_iterator gsi2 = gsi_last_bb (gimple_bb (gsi_stmt (gsi)));
  while (gsi_stmt (gsi2) != gsi_stmt (gsi))
    {
      gimple *t = gsi_stmt (gsi2);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) != GIMPLE_RETURN)
	{
	  gimple_stmt_iterator gsi3 = gsi2;
	  gsi_prev (&gsi2);
	  gsi_remove (&gsi3, true);
	  release_defs (t);
	}
      else
	gsi_prev (&gsi2);
    }

  /* Number of executions of function has reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));

  profile_count count = e->count ();

  /* When profile is inconsistent and the recursion edge is more frequent
     than number of executions of functions, scale it down, so we do not end
     up with 0 executions of entry block.  */
  if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, count);

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!bitmap_bit_p (tailr_arg_needs_copy, idx))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE && TREE_CODE (rslt) == SSA_NAME)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with single predecessor with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple *stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  if (live_vars)
    {
      destroy_live_vars (live_vars_vec);
      delete live_vars;
      live_vars = NULL;
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that the block has only a single predecessor and no
	     existing (degenerate) PHI nodes; otherwise split the edge
	     from the entry block.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  unsigned idx;
	  for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
	       param;
	       param = DECL_CHAIN (param), idx++)
	    if (bitmap_bit_p (tailr_arg_needs_copy, idx))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gphi *phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (tailr_arg_needs_copy)
    BITMAP_FREE (tailr_arg_needs_copy);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
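
/* Note: both passes below are gated on -foptimize-sibling-calls (enabled by
   default at -O2 and above); the dbg_cnt (tail_call) check additionally
   allows bisecting individual transformations via -fdbg-cnt, assuming the
   usual debug-counter conventions.  */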
namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
  {
    return tree_optimize_tail_calls_1 (false);
  }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}
namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}