/* Shrink-wrapping related optimizations.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles shrink-wrapping related optimizations.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "tree-pass.h"
#include "bb-reorder.h"
#include "shrink-wrap.h"
/* Return true if INSN requires the stack frame to be set up.
   PROLOGUE_USED contains the hard registers used in the function
   prologue.  SET_UP_BY_PROLOGUE is the set of registers we expect the
   prologue to set up for the function.  */

bool
requires_stack_frame_p (rtx_insn *insn, HARD_REG_SET prologue_used,
                        HARD_REG_SET set_up_by_prologue)
{
  df_ref def, use;
  HARD_REG_SET hardregs;
  unsigned regno;

  if (CALL_P (insn))
    return !SIBLING_CALL_P (insn);

  /* We need a frame to get the unique CFA expected by the unwinder.  */
  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
    return true;

  CLEAR_HARD_REG_SET (hardregs);
  FOR_EACH_INSN_DEF (def, insn)
    {
      rtx dreg = DF_REF_REG (def);

      if (!REG_P (dreg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (dreg), REGNO (dreg));
    }
  if (hard_reg_set_intersect_p (hardregs, prologue_used))
    return true;
  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (hardregs, regno)
        && df_regs_ever_live_p (regno))
      return true;

  FOR_EACH_INSN_USE (use, insn)
    {
      rtx reg = DF_REF_REG (use);

      if (!REG_P (reg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (reg), REGNO (reg));
    }
  if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
    return true;

  return false;
}
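
/* For example, a register save such as

     (set (mem:SI (plus:SI (reg:SI sp) (const_int -4))) (reg:SI 1))

   uses the stack pointer, which is always in SET_UP_BY_PROLOGUE, so any
   block containing such an insn requires the prologue.  (Illustrative
   RTL; the exact registers and modes are target-dependent.)  */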
/* See whether there is a single live edge from BB whose dest uses
   [REGNO, END_REGNO).  Return the live edge if its dest bb has
   one or two predecessors.  Otherwise return NULL.  */

static edge
live_edge_for_reg (basic_block bb, int regno, int end_regno)
{
  edge e, live_edge;
  edge_iterator ei;
  bitmap live;
  int i;

  live_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      live = df_get_live_in (e->dest);
      for (i = regno; i < end_regno; i++)
        if (REGNO_REG_SET_P (live, i))
          {
            if (live_edge && live_edge != e)
              return NULL;
            live_edge = e;
          }
    }

  /* We can sometimes encounter dead code.  Don't try to move it
     into the exit block.  */
  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Reject targets of abnormal edges.  This is needed for correctness
     on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
     exception edges even though it is generally treated as call-saved
     for the majority of the compilation.  Moving across abnormal edges
     isn't going to be interesting for shrink-wrap usage anyway.  */
  if (live_edge->flags & EDGE_ABNORMAL)
    return NULL;

  /* When live_edge->dest->preds == 2, we can create a new block on
     the edge to make it meet the requirement.  */
  if (EDGE_COUNT (live_edge->dest->preds) > 2)
    return NULL;

  return live_edge;
}
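
/* As an illustration: if BB ends in a conditional branch and only the
   taken successor T has [REGNO, END_REGNO) live on entry,

          BB
         /  \
        T    F

   then BB->T is the unique live edge, and a move defining REGNO can sink
   into T (or into a block newly split on that edge).  (Hypothetical CFG
   for illustration only.)  */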
/* Try to move INSN from BB to a successor.  Return true on success.
   USES and DEFS are the set of registers that are used and defined
   after INSN in BB.  SPLIT_P indicates whether a live edge from BB
   is split or not.  */

static bool
move_insn_for_shrink_wrap (basic_block bb, rtx_insn *insn,
                           const HARD_REG_SET uses,
                           const HARD_REG_SET defs,
                           bool *split_p)
{
  rtx set, src, dest;
  bitmap live_out, live_in, bb_uses, bb_defs;
  unsigned int i, dregno, end_dregno;
  unsigned int sregno = FIRST_PSEUDO_REGISTER;
  unsigned int end_sregno = FIRST_PSEUDO_REGISTER;
  basic_block next_block;
  edge live_edge;
  /* Look for a simple register assignment.  We don't use single_set here
     because we can't deal with any CLOBBERs, USEs, or REG_UNUSED secondary
     destinations.  */
  if (!INSN_P (insn))
    return false;
  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);

  /* For the destination, we want only a register.  Also disallow STACK
     or FRAME related adjustments.  They are likely part of the prologue,
     so keep them in the entry block.  */
  if (!REG_P (dest)
      || dest == stack_pointer_rtx
      || dest == frame_pointer_rtx
      || dest == hard_frame_pointer_rtx)
    return false;

  /* For the source, we want one of:
      (1) A (non-overlapping) register,
      (2) a constant,
      (3) an expression involving no more than one register.

     That last point comes from the code following, which was originally
     written to handle only register move operations, and still only handles
     a single source register when checking for overlaps.  Happily, the
     same checks can be applied to expressions like (plus reg const).  */
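  /* For instance, both of these sources would be acceptable:

         (reg:DI 1)
         (plus:DI (reg:DI 1) (const_int 8))

     whereas (plus:DI (reg:DI 1) (reg:DI 2)) mentions two registers and
     fails the single-inner-register scan below.  (Illustrative RTL.)  */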
  if (CONSTANT_P (src))
    ;
  else if (!REG_P (src))
    {
      rtx src_inner = NULL_RTX;

      if (can_throw_internal (insn))
        return false;

      subrtx_var_iterator::array_type array;
      FOR_EACH_SUBRTX_VAR (iter, array, src, ALL)
        {
          rtx x = *iter;
          switch (GET_RTX_CLASS (GET_CODE (x)))
            {
            case RTX_CONST_OBJ:
            case RTX_COMPARE:
            case RTX_COMM_COMPARE:
            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
            case RTX_UNARY:
            case RTX_TERNARY:
              /* Constant or expression.  Continue.  */
              break;

            case RTX_OBJ:
            case RTX_EXTRA:
              switch (GET_CODE (x))
                {
                case UNSPEC:
                case SUBREG:
                case STRICT_LOW_PART:
                case PC:
                  /* Ok.  Continue.  */
                  break;

                case REG:
                  /* Fail if we see a second inner register.  */
                  if (src_inner != NULL)
                    return false;
                  src_inner = x;
                  break;

                default:
                  return false;
                }
              break;

            default:
              return false;
            }
        }

      if (src_inner != NULL)
        src = src_inner;
    }
  /* Make sure that the source register isn't defined later in BB.  */
  if (REG_P (src))
    {
      sregno = REGNO (src);
      end_sregno = END_REGNO (src);
      if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
        return false;
    }

  /* Make sure that the destination register isn't referenced later in BB.  */
  dregno = REGNO (dest);
  end_dregno = END_REGNO (dest);
  if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
      || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
    return false;
  /* See whether there is a successor block to which we could move INSN.  */
  live_edge = live_edge_for_reg (bb, dregno, end_dregno);
  if (!live_edge)
    return false;

  next_block = live_edge->dest;
  /* Create a new basic block on the edge.  */
  if (EDGE_COUNT (next_block->preds) == 2)
    {
      /* split_edge for a block with only one successor is meaningless.  */
      if (EDGE_COUNT (bb->succs) == 1)
        return false;

      /* If DF_LIVE doesn't exist, i.e. at -O1, just give up.  */
      if (!df_live)
        return false;

      basic_block old_dest = live_edge->dest;
      next_block = split_edge (live_edge);

      /* We create a new basic block.  Call df_grow_bb_info to make sure
         all data structures are allocated.  */
      df_grow_bb_info (df_live);

      bitmap_and (df_get_live_in (next_block), df_get_live_out (bb),
                  df_get_live_in (old_dest));
      df_set_bb_dirty (next_block);

      /* We should not split more than once for a function.  */
      if (*split_p)
        return false;

      *split_p = true;
    }
  /* At this point we are committed to moving INSN, but let's try to
     move it as far as we can.  */
  do
    {
      live_out = df_get_live_out (bb);
      live_in = df_get_live_in (next_block);
      bb = next_block;

      /* Check whether BB uses DEST or clobbers DEST.  We need to add
         INSN to BB if so.  Either way, DEST is no longer live on entry,
         except for any part that overlaps SRC (next loop).  */
      bb_uses = &DF_LR_BB_INFO (bb)->use;
      bb_defs = &DF_LR_BB_INFO (bb)->def;
      if (df_live)
        {
          for (i = dregno; i < end_dregno; i++)
            {
              if (*split_p
                  || REGNO_REG_SET_P (bb_uses, i)
                  || REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          /* Check whether BB clobbers SRC.  We need to add INSN to BB if so.
             Either way, SRC is now live on entry.  */
          for (i = sregno; i < end_sregno; i++)
            {
              if (*split_p
                  || REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }
      else
        {
          /* DF_LR_BB_INFO (bb)->def does not comprise the DF_REF_PARTIAL and
             DF_REF_CONDITIONAL defs.  So if DF_LIVE doesn't exist, i.e.
             at -O1, just give up searching NEXT_BLOCK.  */
          next_block = NULL;
          for (i = dregno; i < end_dregno; i++)
            {
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          for (i = sregno; i < end_sregno; i++)
            {
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }

      /* If we don't need to add the move to BB, look for a single
         successor block.  */
      if (next_block)
        {
          live_edge = live_edge_for_reg (next_block, dregno, end_dregno);
          if (!live_edge || EDGE_COUNT (live_edge->dest->preds) > 1)
            break;
          next_block = live_edge->dest;
        }
    }
  while (next_block);
  /* For the newly created basic block, there is no dataflow info at all.
     So skip the following dataflow update and check.  */
  if (!*split_p)
    {
      /* BB now defines DEST.  It only uses the parts of DEST that overlap SRC
         (next loop).  */
      for (i = dregno; i < end_dregno; i++)
        {
          CLEAR_REGNO_REG_SET (bb_uses, i);
          SET_REGNO_REG_SET (bb_defs, i);
        }

      /* BB now uses SRC.  */
      for (i = sregno; i < end_sregno; i++)
        SET_REGNO_REG_SET (bb_uses, i);
    }

  emit_insn_after (PATTERN (insn), bb_note (bb));
  delete_insn (insn);
  return true;
}
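
/* A typical effect of the above: given an entry block containing

     (set (reg:DI 42) (reg:DI 5))       ; read on only one successor path
     (conditional jump ...)

   the set is re-emitted at the head of the successor that actually reads
   (reg:DI 42) and deleted here, shrinking the region that will need the
   prologue.  (Illustrative example; the register numbers are made up.)  */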
/* Look for register copies in the first block of the function, and move
   them down into successor blocks if the register is used only on one
   path.  This exposes more opportunities for shrink-wrapping.  These
   kinds of sets often occur when incoming argument registers are moved
   to call-saved registers because their values are live across one or
   more calls during the function.  */

static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx_insn *insn, *curr;
  rtx x;
  HARD_REG_SET uses, defs;
  df_ref def, use;
  bool split_p = false;

  if (JUMP_P (BB_END (entry_block)))
    {
      /* To have more shrink-wrapping opportunities, prepare_shrink_wrap tries
         to sink copies from parameter registers to callee-saved registers out
         of the entry block.  copyprop_hardreg_forward_bb_without_debug_insn
         is called to remove some dependences.  */
      copyprop_hardreg_forward_bb_without_debug_insn (entry_block);
    }

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
        && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs,
                                       &split_p))
      {
        /* Add all defined registers to DEFS.  */
        FOR_EACH_INSN_DEF (def, insn)
          {
            x = DF_REF_REG (def);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (defs, REGNO (x));
          }

        /* Add all used registers to USES.  */
        FOR_EACH_INSN_USE (use, insn)
          {
            x = DF_REF_REG (use);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (uses, REGNO (x));
          }
      }
}
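
/* For example, on a target whose first argument register is
   call-clobbered, a copy in the entry block such as

     (set (reg:DI 3) (reg:DI 5))        ; save incoming arg across calls

   can be sunk into the one path that actually makes calls, leaving the
   other paths free of any prologue requirement.  (Illustrative; the
   register numbers are hypothetical.)  */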
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */

static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx_insn *before,
                        bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx_insn *insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          count++;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
               bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei));)
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
        int freq = EDGE_FREQUENCY (e);
        copy_bb->count += e->count;
        copy_bb->frequency += EDGE_FREQUENCY (e);
        e->dest->count -= e->count;
        if (e->dest->count < 0)
          e->dest->count = 0;
        e->dest->frequency -= freq;
        if (e->dest->frequency < 0)
          e->dest->frequency = 0;
        redirect_edge_and_branch_force (e, copy_bb);
        continue;
      }
    else
      ei_next (&ei);
}
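
/* Note that the counts and frequencies moved to COPY_BB above are only
   estimates: profile weight is shifted from the original destination to
   the copy in proportion to the redirected edges, clamping at zero so
   the profile stays non-negative.  */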
/* Try to perform a kind of shrink-wrapping, making sure the
   prologue/epilogue is emitted only around those parts of the
   function that require it.  */

void
try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
                     bitmap_head *bb_flags, rtx_insn *prologue_seq)
{
  edge e;
  edge_iterator ei;
  bool nonempty_prologue = false;
  unsigned max_grow_size;
  rtx_insn *seq;

  for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
    if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
      {
        nonempty_prologue = true;
        break;
      }

  if (SHRINK_WRAPPING_ENABLED
      && (targetm.profile_before_prologue () || !crtl->profile)
      && nonempty_prologue && !crtl->calls_eh_return)
    {
      HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
      struct hard_reg_set_container set_up_by_prologue;
      rtx_insn *p_insn;
      vec<basic_block> vec;
      basic_block bb;
      bitmap_head bb_antic_flags;
      bitmap_head bb_on_list;
      bitmap_head bb_tail;

      if (dump_file)
        fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");

      /* Compute the registers set and used in the prologue.  */
      CLEAR_HARD_REG_SET (prologue_clobbered);
      CLEAR_HARD_REG_SET (prologue_used);
      for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
        {
          HARD_REG_SET this_used;
          if (!NONDEBUG_INSN_P (p_insn))
            continue;

          CLEAR_HARD_REG_SET (this_used);
          note_uses (&PATTERN (p_insn), record_hard_reg_uses,
                     &this_used);
          AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
          IOR_HARD_REG_SET (prologue_used, this_used);
          note_stores (PATTERN (p_insn), record_hard_reg_sets,
                       &prologue_clobbered);
        }

      prepare_shrink_wrap ((*entry_edge)->dest);

      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
      bitmap_initialize (&bb_tail, &bitmap_default_obstack);

      /* Find the set of basic blocks that require a stack frame,
         and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks_for_fn (cfun));

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                           STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx
          && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
        add_to_hard_reg_set (&set_up_by_prologue.set,
                             GET_MODE (crtl->drap_reg),
                             REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
        targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
         optimize_bb_for_speed_p because increasing shrink-wrapping
         opportunities by duplicating tail blocks can actually result
         in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
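
      /* For example, assuming the default value of
         --param max-grow-copy-bb-insns (8 at the time of writing), a
         tail may cost up to eight unconditional-jump lengths before it
         is considered too big to duplicate.  */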
      FOR_EACH_BB_FN (bb, cfun)
        {
          rtx_insn *insn;
          unsigned size = 0;

          FOR_BB_INSNS (bb, insn)
            if (NONDEBUG_INSN_P (insn))
              {
                if (requires_stack_frame_p (insn, prologue_used,
                                            set_up_by_prologue.set))
                  {
                    if (bb == (*entry_edge)->dest)
                      goto fail_shrinkwrap;
                    bitmap_set_bit (bb_flags, bb->index);
                    vec.quick_push (bb);
                    break;
                  }
                else if (size <= max_grow_size)
                  {
                    size += get_attr_min_length (insn);
                    if (size > max_grow_size)
                      bitmap_set_bit (&bb_on_list, bb->index);
                  }
              }
        }

      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
         reachable from it, so as to ensure they are also seen as
         requiring a prologue.  */
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                && bitmap_set_bit (bb_flags, e->dest->index))
              vec.quick_push (e->dest);
        }

      /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR_FOR_FN (cfun));
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->preds)
            if (single_succ_p (e->src)
                && !bitmap_bit_p (&bb_on_list, e->src->index)
                && can_duplicate_block_p (e->src))
              {
                edge pe;
                edge_iterator pei;

                /* If there is a predecessor of e->src which doesn't
                   need a prologue and the edge is complex,
                   we might not be able to redirect the branch
                   to a copy of e->src.  */
                FOR_EACH_EDGE (pe, pei, e->src->preds)
                  if ((pe->flags & EDGE_COMPLEX) != 0
                      && !bitmap_bit_p (bb_flags, pe->src->index))
                    break;
                if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
                  vec.quick_push (e->src);
              }
        }

      /* Now walk backwards from every block that is marked as needing
         a prologue to compute the bb_antic_flags bitmap.  Exclude
         tail blocks; they can be duplicated to be used on paths not
         needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, bb_flags, &bb_tail);
      FOR_EACH_BB_FN (bb, cfun)
        {
          if (!bitmap_bit_p (&bb_antic_flags, bb->index))
            continue;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                && bitmap_set_bit (&bb_on_list, e->src->index))
              vec.quick_push (e->src);
        }
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();
          bool all_set = true;

          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
              {
                all_set = false;
                break;
              }

          if (all_set)
            {
              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
              FOR_EACH_EDGE (e, ei, tmp_bb->preds)
                if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                    && bitmap_set_bit (&bb_on_list, e->src->index))
                  vec.quick_push (e->src);
            }
        }
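
      /* After this fixpoint, a block is in ANTIC iff the prologue is
         needed on every path from that block to the exit.  E.g. in

             entry -> A -> B (needs frame) -> exit
                       \-> C --------------> exit

         B is ANTIC while A and C are not, so A->B is the lone edge from
         a non-ANTIC block into ANTIC and becomes the candidate prologue
         edge below.  (Illustrative CFG.)  */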
      /* Find exactly one edge that leads to a block in ANTIC from
         a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, (*entry_edge)->dest->index))
        FOR_EACH_BB_FN (bb, cfun)
          {
            if (!bitmap_bit_p (&bb_antic_flags, bb->index))
              continue;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
                {
                  if (*entry_edge != orig_entry_edge)
                    {
                      *entry_edge = orig_entry_edge;
                      if (dump_file)
                        fprintf (dump_file, "More than one candidate edge.\n");
                      goto fail_shrinkwrap;
                    }
                  if (dump_file)
                    fprintf (dump_file, "Found candidate edge for "
                             "shrink-wrapping, %d->%d.\n", e->src->index,
                             e->dest->index);
                  *entry_edge = e;
                }
          }

      if (*entry_edge != orig_entry_edge)
        {
          /* Test whether the prologue is known to clobber any register
             (other than FP or SP) which is live on the edge.  */
          CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
          REG_SET_TO_HARD_REG_SET (live_on_edge,
                                   df_get_live_in ((*entry_edge)->dest));
          if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
            {
              *entry_edge = orig_entry_edge;
              if (dump_file)
                fprintf (dump_file,
                         "Shrink-wrapping aborted due to clobber.\n");
            }
        }

      if (*entry_edge != orig_entry_edge)
        {
          crtl->shrink_wrapped = true;
          if (dump_file)
            fprintf (dump_file, "Performing shrink-wrapping.\n");

          /* Find tail blocks reachable from both blocks needing a
             prologue and blocks not needing a prologue.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_FN (bb, cfun)
              {
                bool some_pro, some_no_pro;
                if (!bitmap_bit_p (&bb_tail, bb->index))
                  continue;
                some_pro = some_no_pro = false;
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (bb_flags, e->src->index))
                      some_pro = true;
                    else
                      some_no_pro = true;
                  }
                if (some_pro && some_no_pro)
                  vec.quick_push (bb);
                else
                  bitmap_clear_bit (&bb_tail, bb->index);
              }
          /* Find the head of each tail.  */
          while (!vec.is_empty ())
            {
              basic_block tbb = vec.pop ();

              if (!bitmap_bit_p (&bb_tail, tbb->index))
                continue;

              while (single_succ_p (tbb))
                {
                  tbb = single_succ (tbb);
                  bitmap_clear_bit (&bb_tail, tbb->index);
                }
            }
          /* Now duplicate the tails.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_REVERSE_FN (bb, cfun)
              {
                basic_block copy_bb, tbb;
                int eflags;

                if (!bitmap_clear_bit (&bb_tail, bb->index))
                  continue;

                /* Create a copy of BB, instructions and all, for
                   use on paths that don't need a prologue.
                   Ideal placement of the copy is on a fall-thru edge
                   or after a block that would jump to the copy.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  if (!bitmap_bit_p (bb_flags, e->src->index)
                      && single_succ_p (e->src))
                    break;
                if (e)
                  {
                    /* Make sure we insert after any barriers.  */
                    rtx_insn *end = get_last_bb_insn (e->src);
                    copy_bb = create_basic_block (NEXT_INSN (end),
                                                  NULL_RTX, e->src);
                    BB_COPY_PARTITION (copy_bb, e->src);
                  }
                else
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
                                                  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }

                rtx_note *insert_point = emit_note_after (NOTE_INSN_DELETED,
                                                          BB_END (copy_bb));
                emit_barrier_after (BB_END (copy_bb));

                tbb = bb;
                while (1)
                  {
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            bb_flags);
                    tbb = single_succ (tbb);
                    if (tbb == EXIT_BLOCK_PTR_FOR_FN (cfun))
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
                  }

                /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
                   We have yet to add a simple_return to the tails,
                   as we'd like to first convert_jumps_to_returns in
                   case the block is no longer used after that.  */
                eflags = EDGE_FAKE;
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
                make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                                       eflags);

                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
                delete_insn (insert_point);
                if (bitmap_empty_p (&bb_tail))
                  break;
              }
        }

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
}
/* If we're allowed to generate a simple return instruction, then by
   definition we don't need a full epilogue.  If the last basic
   block before the exit block does not contain active instructions,
   examine its predecessors and try to emit (conditional) return
   instructions.  */

edge
get_unconverted_simple_return (edge exit_fallthru_edge, bitmap_head bb_flags,
                               vec<edge> *unconverted_simple_returns,
                               rtx_insn **returnjump)
{
  if (optimize)
    {
      unsigned i, last;

      /* convert_jumps_to_returns may add to preds of the exit block
         (but won't remove).  Stop at end of current preds.  */
      last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
      for (i = 0; i < last; i++)
        {
          edge e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
          if (LABEL_P (BB_HEAD (e->src))
              && !bitmap_bit_p (&bb_flags, e->src->index)
              && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
            *unconverted_simple_returns
              = convert_jumps_to_returns (e->src, true,
                                          *unconverted_simple_returns);
        }
    }

  if (exit_fallthru_edge != NULL
      && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
      && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
    {
      basic_block last_bb;

      last_bb = emit_return_for_exit (exit_fallthru_edge, true);
      *returnjump = BB_END (last_bb);
      exit_fallthru_edge = NULL;
    }
  return exit_fallthru_edge;
}
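
/* Note that a simple_return leaves the stack frame untouched, so it is
   only correct on paths where the prologue never executed; paths that
   did run the prologue must instead reach a full epilogue before
   returning.  */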
/* If there were branches to an empty LAST_BB which we tried to
   convert to conditional simple_returns, but couldn't for some
   reason, create a block to hold a simple_return insn and redirect
   those remaining edges.  */

void
convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
                          bitmap_head bb_flags, rtx_insn *returnjump,
                          vec<edge> unconverted_simple_returns)
{
  edge e;
  edge_iterator ei;

  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
         epilogue.  */
      if (returnjump != NULL_RTX
          && JUMP_LABEL (returnjump) == simple_return_rtx)
        {
          e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            simple_return_block_hot = e->dest;
          else
            simple_return_block_cold = e->dest;
        }

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
              pending_edge_hot = e;
            else
              pending_edge_cold = e;
          }

      /* Save a pointer to the exit's predecessor BB for use in
         inserting new BBs at the end of the function.  Do this
         after the call to split_block above which may split
         the original exit pred.  */
      exit_pred = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
        {
          basic_block *pdest_bb;
          edge pending;

          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            {
              pdest_bb = &simple_return_block_hot;
              pending = pending_edge_hot;
            }
          else
            {
              pdest_bb = &simple_return_block_cold;
              pending = pending_edge_cold;
            }

          if (*pdest_bb == NULL && pending != NULL)
            {
              emit_return_into_block (true, pending->src);
              pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
              *pdest_bb = pending->src;
            }
          else if (*pdest_bb == NULL)
            {
              basic_block bb;

              bb = create_basic_block (NULL, NULL, exit_pred);
              BB_COPY_PARTITION (bb, e->src);
              rtx_insn *ret = targetm.gen_simple_return ();
              rtx_jump_insn *start = emit_jump_insn_after (ret, BB_END (bb));
              JUMP_LABEL (start) = simple_return_rtx;
              emit_barrier_after (start);

              *pdest_bb = bb;
              make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
      unconverted_simple_returns.release ();
    }

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            emit_return_into_block (true, e->src);
            e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
          }
    }
}