/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "common/common-target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
#include "emit-rtl.h"
45 /* Holds the interesting trailing notes for the function. */
46 rtx cfg_layout_function_footer
;
47 rtx cfg_layout_function_header
;
49 static rtx
skip_insns_after_block (basic_block
);
50 static void record_effective_endpoints (void);
51 static rtx
label_for_bb (basic_block
);
52 static void fixup_reorder_chain (void);
54 static void change_scope (rtx
, tree
, tree
);
56 void verify_insn_chain (void);
57 static void fixup_fallthru_exit_predecessor (void);
60 unlink_insn_chain (rtx first
, rtx last
)
62 rtx prevfirst
= PREV_INSN (first
);
63 rtx nextlast
= NEXT_INSN (last
);
65 PREV_INSN (first
) = NULL
;
66 NEXT_INSN (last
) = NULL
;
68 NEXT_INSN (prevfirst
) = nextlast
;
70 PREV_INSN (nextlast
) = prevfirst
;
72 set_last_insn (prevfirst
);
74 set_first_insn (nextlast
);
78 /* Skip over inter-block insns occurring after BB which are typically
79 associated with BB (e.g., barriers). If there are any such insns,
80 we return the last one. Otherwise, we return the end of BB. */
83 skip_insns_after_block (basic_block bb
)
85 rtx insn
, last_insn
, next_head
, prev
;
88 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
89 next_head
= BB_HEAD (bb
->next_bb
);
91 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
93 if (insn
== next_head
)
96 switch (GET_CODE (insn
))
103 switch (NOTE_KIND (insn
))
105 case NOTE_INSN_BLOCK_END
:
116 && JUMP_TABLE_DATA_P (NEXT_INSN (insn
)))
118 insn
= NEXT_INSN (insn
);
131 /* It is possible to hit contradictory sequence. For instance:
137 Where barrier belongs to jump_insn, but the note does not. This can be
138 created by removing the basic block originally following
139 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
141 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
143 prev
= PREV_INSN (insn
);
145 switch (NOTE_KIND (insn
))
147 case NOTE_INSN_BLOCK_END
:
150 case NOTE_INSN_DELETED
:
151 case NOTE_INSN_DELETED_LABEL
:
154 reorder_insns (insn
, insn
, last_insn
);
161 /* Locate or create a label for a given basic block. */
164 label_for_bb (basic_block bb
)
166 rtx label
= BB_HEAD (bb
);
168 if (!LABEL_P (label
))
171 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
173 label
= block_label (bb
);
179 /* Locate the effective beginning and end of the insn chain for each
180 block, as defined by skip_insns_after_block above. */
183 record_effective_endpoints (void)
189 for (insn
= get_insns ();
192 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
193 insn
= NEXT_INSN (insn
))
195 /* No basic blocks at all? */
198 if (PREV_INSN (insn
))
199 cfg_layout_function_header
=
200 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
202 cfg_layout_function_header
= NULL_RTX
;
204 next_insn
= get_insns ();
209 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
210 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
211 PREV_INSN (BB_HEAD (bb
)));
212 end
= skip_insns_after_block (bb
);
213 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
214 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
215 next_insn
= NEXT_INSN (BB_END (bb
));
218 cfg_layout_function_footer
= next_insn
;
219 if (cfg_layout_function_footer
)
220 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
223 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
224 numbers and files. In order to be GGC friendly we need to use separate
225 varrays. This also slightly improve the memory locality in binary search.
226 The _locs array contains locators where the given property change. The
227 block_locators_blocks contains the scope block that is used for all insn
228 locator greater than corresponding block_locators_locs value and smaller
229 than the following one. Similarly for the other properties. */
230 static VEC(int,heap
) *block_locators_locs
;
231 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
232 static VEC(int,heap
) *locations_locators_locs
;
233 DEF_VEC_O(location_t
);
234 DEF_VEC_ALLOC_O(location_t
,heap
);
235 static VEC(location_t
,heap
) *locations_locators_vals
;
236 int prologue_locator
;
237 int epilogue_locator
;
239 /* Hold current location information and last location information, so the
240 datastructures are built lazily only when some instructions in given
242 static location_t curr_location
, last_location
;
243 static tree curr_block
, last_block
;
244 static int curr_rtl_loc
= -1;
246 /* Allocate insn locator datastructure. */
248 insn_locators_alloc (void)
250 prologue_locator
= epilogue_locator
= 0;
252 block_locators_locs
= VEC_alloc (int, heap
, 32);
253 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
254 locations_locators_locs
= VEC_alloc (int, heap
, 32);
255 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
257 curr_location
= UNKNOWN_LOCATION
;
258 last_location
= UNKNOWN_LOCATION
;
264 /* At the end of emit stage, clear current location. */
266 insn_locators_finalize (void)
268 if (curr_rtl_loc
>= 0)
269 epilogue_locator
= curr_insn_locator ();
273 /* Allocate insn locator datastructure. */
275 insn_locators_free (void)
277 prologue_locator
= epilogue_locator
= 0;
279 VEC_free (int, heap
, block_locators_locs
);
280 VEC_free (tree
,gc
, block_locators_blocks
);
281 VEC_free (int, heap
, locations_locators_locs
);
282 VEC_free (location_t
, heap
, locations_locators_vals
);
286 /* Set current location. */
288 set_curr_insn_source_location (location_t location
)
290 /* IV opts calls into RTL expansion to compute costs of operations. At this
291 time locators are not initialized. */
292 if (curr_rtl_loc
== -1)
294 curr_location
= location
;
297 /* Get current location. */
299 get_curr_insn_source_location (void)
301 return curr_location
;
304 /* Set current scope block. */
306 set_curr_insn_block (tree b
)
308 /* IV opts calls into RTL expansion to compute costs of operations. At this
309 time locators are not initialized. */
310 if (curr_rtl_loc
== -1)
316 /* Get current scope block. */
318 get_curr_insn_block (void)
323 /* Return current insn locator. */
325 curr_insn_locator (void)
327 if (curr_rtl_loc
== -1 || curr_location
== UNKNOWN_LOCATION
)
329 if (last_block
!= curr_block
)
332 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
333 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
334 last_block
= curr_block
;
336 if (last_location
!= curr_location
)
339 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
340 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
341 last_location
= curr_location
;
/* Pass callback: switch the current function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
354 outof_cfg_layout_mode (void)
359 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
360 bb
->aux
= bb
->next_bb
;
362 cfg_layout_finalize ();
367 struct rtl_opt_pass pass_into_cfg_layout_mode
=
371 "into_cfglayout", /* name */
373 into_cfg_layout_mode
, /* execute */
376 0, /* static_pass_number */
378 0, /* properties_required */
379 PROP_cfglayout
, /* properties_provided */
380 0, /* properties_destroyed */
381 0, /* todo_flags_start */
382 0 /* todo_flags_finish */
386 struct rtl_opt_pass pass_outof_cfg_layout_mode
=
390 "outof_cfglayout", /* name */
392 outof_cfg_layout_mode
, /* execute */
395 0, /* static_pass_number */
397 0, /* properties_required */
398 0, /* properties_provided */
399 PROP_cfglayout
, /* properties_destroyed */
400 0, /* todo_flags_start */
401 0 /* todo_flags_finish */
405 /* Return scope resulting from combination of S1 and S2. */
407 choose_inner_scope (tree s1
, tree s2
)
413 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
418 /* Emit lexical block notes needed to change scope from S1 to S2. */
421 change_scope (rtx orig_insn
, tree s1
, tree s2
)
423 rtx insn
= orig_insn
;
424 tree com
= NULL_TREE
;
425 tree ts1
= s1
, ts2
= s2
;
430 gcc_assert (ts1
&& ts2
);
431 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
432 ts1
= BLOCK_SUPERCONTEXT (ts1
);
433 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
434 ts2
= BLOCK_SUPERCONTEXT (ts2
);
437 ts1
= BLOCK_SUPERCONTEXT (ts1
);
438 ts2
= BLOCK_SUPERCONTEXT (ts2
);
447 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
448 NOTE_BLOCK (note
) = s
;
449 s
= BLOCK_SUPERCONTEXT (s
);
456 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
457 NOTE_BLOCK (insn
) = s
;
458 s
= BLOCK_SUPERCONTEXT (s
);
462 /* Return lexical scope block locator belongs to. */
464 locator_scope (int loc
)
466 int max
= VEC_length (int, block_locators_locs
);
469 /* When block_locators_locs was initialized, the pro- and epilogue
470 insns didn't exist yet and can therefore not be found this way.
471 But we know that they belong to the outer most block of the
473 Without this test, the prologue would be put inside the block of
474 the first valid instruction in the function and when that first
475 insn is part of an inlined function then the low_pc of that
476 inlined function is messed up. Likewise for the epilogue and
477 the last valid instruction. */
478 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
479 return DECL_INITIAL (cfun
->decl
);
485 int pos
= (min
+ max
) / 2;
486 int tmp
= VEC_index (int, block_locators_locs
, pos
);
488 if (tmp
<= loc
&& min
!= pos
)
490 else if (tmp
> loc
&& max
!= pos
)
498 return VEC_index (tree
, block_locators_blocks
, min
);
501 /* Return lexical scope block insn belongs to. */
503 insn_scope (const_rtx insn
)
505 return locator_scope (INSN_LOCATOR (insn
));
508 /* Return line number of the statement specified by the locator. */
510 locator_location (int loc
)
512 int max
= VEC_length (int, locations_locators_locs
);
517 int pos
= (min
+ max
) / 2;
518 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
520 if (tmp
<= loc
&& min
!= pos
)
522 else if (tmp
> loc
&& max
!= pos
)
530 return *VEC_index (location_t
, locations_locators_vals
, min
);
533 /* Return source line of the statement that produced this insn. */
535 locator_line (int loc
)
537 expanded_location xloc
;
541 xloc
= expand_location (locator_location (loc
));
545 /* Return line number of the statement that produced this insn. */
547 insn_line (const_rtx insn
)
549 return locator_line (INSN_LOCATOR (insn
));
552 /* Return source file of the statement specified by LOC. */
554 locator_file (int loc
)
556 expanded_location xloc
;
560 xloc
= expand_location (locator_location (loc
));
564 /* Return source file of the statement that produced this insn. */
566 insn_file (const_rtx insn
)
568 return locator_file (INSN_LOCATOR (insn
));
571 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
573 locator_eq (int loc1
, int loc2
)
577 if (locator_location (loc1
) != locator_location (loc2
))
579 return locator_scope (loc1
) == locator_scope (loc2
);
582 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
583 on the scope tree and the newly reordered instructions. */
586 reemit_insn_block_notes (void)
588 tree cur_block
= DECL_INITIAL (cfun
->decl
);
592 if (!active_insn_p (insn
))
593 insn
= next_active_insn (insn
);
594 for (; insn
; insn
= next_active_insn (insn
))
598 /* Avoid putting scope notes between jump table and its label. */
599 if (JUMP_TABLE_DATA_P (insn
))
602 this_block
= insn_scope (insn
);
603 /* For sequences compute scope resulting from merging all scopes
604 of instructions nested inside. */
605 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
608 rtx body
= PATTERN (insn
);
611 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
612 this_block
= choose_inner_scope (this_block
,
613 insn_scope (XVECEXP (body
, 0, i
)));
618 if (this_block
!= cur_block
)
620 change_scope (insn
, cur_block
, this_block
);
621 cur_block
= this_block
;
625 /* change_scope emits before the insn, not after. */
626 note
= emit_note (NOTE_INSN_DELETED
);
627 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
634 /* Link the basic blocks in the correct order, compacting the basic
635 block queue while at it. This also clears the visited flag on
636 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
637 also clears the basic block header and footer fields.
639 This function is usually called after a pass (e.g. tracer) finishes
640 some transformations while in cfglayout mode. The required sequence
641 of the basic blocks is in a linked list along the bb->aux field.
642 This functions re-links the basic block prev_bb and next_bb pointers
643 accordingly, and it compacts and renumbers the blocks. */
646 relink_block_chain (bool stay_in_cfglayout_mode
)
648 basic_block bb
, prev_bb
;
651 /* Maybe dump the re-ordered sequence. */
654 fprintf (dump_file
, "Reordered sequence:\n");
655 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
657 bb
= (basic_block
) bb
->aux
, index
++)
659 fprintf (dump_file
, " %i ", index
);
660 if (get_bb_original (bb
))
661 fprintf (dump_file
, "duplicate of %i ",
662 get_bb_original (bb
)->index
);
663 else if (forwarder_block_p (bb
)
664 && !LABEL_P (BB_HEAD (bb
)))
665 fprintf (dump_file
, "compensation ");
667 fprintf (dump_file
, "bb %i ", bb
->index
);
668 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
672 /* Now reorder the blocks. */
673 prev_bb
= ENTRY_BLOCK_PTR
;
674 bb
= ENTRY_BLOCK_PTR
->next_bb
;
675 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
677 bb
->prev_bb
= prev_bb
;
678 prev_bb
->next_bb
= bb
;
680 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
681 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
683 /* Then, clean up the aux and visited fields. */
687 bb
->il
.rtl
->visited
= 0;
688 if (!stay_in_cfglayout_mode
)
689 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
692 /* Maybe reset the original copy tables, they are not valid anymore
693 when we renumber the basic blocks in compact_blocks. If we are
694 are going out of cfglayout mode, don't re-allocate the tables. */
695 free_original_copy_tables ();
696 if (stay_in_cfglayout_mode
)
697 initialize_original_copy_tables ();
699 /* Finally, put basic_block_info in the new order. */
704 /* Given a reorder chain, rearrange the code to match. */
707 fixup_reorder_chain (void)
712 if (cfg_layout_function_header
)
714 set_first_insn (cfg_layout_function_header
);
715 insn
= cfg_layout_function_header
;
716 while (NEXT_INSN (insn
))
717 insn
= NEXT_INSN (insn
);
720 /* First do the bulk reordering -- rechain the blocks without regard to
721 the needed changes to jumps and labels. */
723 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
725 if (bb
->il
.rtl
->header
)
728 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
730 set_first_insn (bb
->il
.rtl
->header
);
731 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
732 insn
= bb
->il
.rtl
->header
;
733 while (NEXT_INSN (insn
))
734 insn
= NEXT_INSN (insn
);
737 NEXT_INSN (insn
) = BB_HEAD (bb
);
739 set_first_insn (BB_HEAD (bb
));
740 PREV_INSN (BB_HEAD (bb
)) = insn
;
742 if (bb
->il
.rtl
->footer
)
744 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
745 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
746 while (NEXT_INSN (insn
))
747 insn
= NEXT_INSN (insn
);
751 NEXT_INSN (insn
) = cfg_layout_function_footer
;
752 if (cfg_layout_function_footer
)
753 PREV_INSN (cfg_layout_function_footer
) = insn
;
755 while (NEXT_INSN (insn
))
756 insn
= NEXT_INSN (insn
);
758 set_last_insn (insn
);
759 #ifdef ENABLE_CHECKING
760 verify_insn_chain ();
763 /* Now add jumps and labels as needed to match the blocks new
766 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
768 edge e_fall
, e_taken
, e
;
770 basic_block nb
, src_bb
;
773 if (EDGE_COUNT (bb
->succs
) == 0)
776 /* Find the old fallthru edge, and another non-EH edge for
778 e_taken
= e_fall
= NULL
;
780 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
781 if (e
->flags
& EDGE_FALLTHRU
)
783 else if (! (e
->flags
& EDGE_EH
))
786 bb_end_insn
= BB_END (bb
);
787 if (JUMP_P (bb_end_insn
))
789 if (any_condjump_p (bb_end_insn
))
791 /* This might happen if the conditional jump has side
792 effects and could therefore not be optimized away.
793 Make the basic block to end with a barrier in order
794 to prevent rtl_verify_flow_info from complaining. */
797 gcc_assert (!onlyjump_p (bb_end_insn
)
798 || returnjump_p (bb_end_insn
));
799 bb
->il
.rtl
->footer
= emit_barrier_after (bb_end_insn
);
803 /* If the old fallthru is still next, nothing to do. */
804 if (bb
->aux
== e_fall
->dest
805 || e_fall
->dest
== EXIT_BLOCK_PTR
)
808 /* The degenerated case of conditional jump jumping to the next
809 instruction can happen for jumps with side effects. We need
810 to construct a forwarder block and this will be done just
811 fine by force_nonfallthru below. */
815 /* There is another special case: if *neither* block is next,
816 such as happens at the very end of a function, then we'll
817 need to add a new unconditional jump. Choose the taken
818 edge based on known or assumed probability. */
819 else if (bb
->aux
!= e_taken
->dest
)
821 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
824 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
825 && invert_jump (bb_end_insn
,
826 (e_fall
->dest
== EXIT_BLOCK_PTR
828 : label_for_bb (e_fall
->dest
)), 0))
830 e_fall
->flags
&= ~EDGE_FALLTHRU
;
831 gcc_checking_assert (could_fall_through
832 (e_taken
->src
, e_taken
->dest
));
833 e_taken
->flags
|= EDGE_FALLTHRU
;
834 update_br_prob_note (bb
);
835 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
839 /* If the "jumping" edge is a crossing edge, and the fall
840 through edge is non-crossing, leave things as they are. */
841 else if ((e_taken
->flags
& EDGE_CROSSING
)
842 && !(e_fall
->flags
& EDGE_CROSSING
))
845 /* Otherwise we can try to invert the jump. This will
846 basically never fail, however, keep up the pretense. */
847 else if (invert_jump (bb_end_insn
,
848 (e_fall
->dest
== EXIT_BLOCK_PTR
850 : label_for_bb (e_fall
->dest
)), 0))
852 e_fall
->flags
&= ~EDGE_FALLTHRU
;
853 gcc_checking_assert (could_fall_through
854 (e_taken
->src
, e_taken
->dest
));
855 e_taken
->flags
|= EDGE_FALLTHRU
;
856 update_br_prob_note (bb
);
860 else if (extract_asm_operands (PATTERN (bb_end_insn
)) != NULL
)
862 /* If the old fallthru is still next or if
863 asm goto doesn't have a fallthru (e.g. when followed by
864 __builtin_unreachable ()), nothing to do. */
866 || bb
->aux
== e_fall
->dest
867 || e_fall
->dest
== EXIT_BLOCK_PTR
)
870 /* Otherwise we'll have to use the fallthru fixup below. */
874 /* Otherwise we have some return, switch or computed
875 jump. In the 99% case, there should not have been a
877 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
883 /* No fallthru implies a noreturn function with EH edges, or
884 something similarly bizarre. In any case, we don't need to
889 /* If the fallthru block is still next, nothing to do. */
890 if (bb
->aux
== e_fall
->dest
)
893 /* A fallthru to exit block. */
894 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
898 /* We got here if we need to add a new jump insn.
899 Note force_nonfallthru can delete E_FALL and thus we have to
900 save E_FALL->src prior to the call to force_nonfallthru. */
901 src_bb
= e_fall
->src
;
902 nb
= force_nonfallthru_and_redirect (e_fall
, e_fall
->dest
);
905 nb
->il
.rtl
->visited
= 1;
908 /* Don't process this new block. */
911 /* Make sure new bb is tagged for correct section (same as
912 fall-thru source, since you cannot fall-thru across
913 section boundaries). */
914 BB_COPY_PARTITION (src_bb
, single_pred (bb
));
915 if (flag_reorder_blocks_and_partition
916 && targetm_common
.have_named_sections
917 && JUMP_P (BB_END (bb
))
918 && !any_condjump_p (BB_END (bb
))
919 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
920 add_reg_note (BB_END (bb
), REG_CROSSING_JUMP
, NULL_RTX
);
924 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
926 /* Annoying special case - jump around dead jumptables left in the code. */
929 edge e
= find_fallthru_edge (bb
->succs
);
931 if (e
&& !can_fallthru (e
->src
, e
->dest
))
932 force_nonfallthru (e
);
935 /* Ensure goto_locus from edges has some instructions with that locus
943 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
944 if (e
->goto_locus
&& !(e
->flags
& EDGE_ABNORMAL
))
948 basic_block dest
, nb
;
951 insn
= BB_END (e
->src
);
952 end
= PREV_INSN (BB_HEAD (e
->src
));
954 && (!NONDEBUG_INSN_P (insn
) || INSN_LOCATOR (insn
) == 0))
955 insn
= PREV_INSN (insn
);
957 && locator_eq (INSN_LOCATOR (insn
), (int) e
->goto_locus
))
959 if (simplejump_p (BB_END (e
->src
))
960 && INSN_LOCATOR (BB_END (e
->src
)) == 0)
962 INSN_LOCATOR (BB_END (e
->src
)) = e
->goto_locus
;
966 if (dest
== EXIT_BLOCK_PTR
)
968 /* Non-fallthru edges to the exit block cannot be split. */
969 if (!(e
->flags
& EDGE_FALLTHRU
))
974 insn
= BB_HEAD (dest
);
975 end
= NEXT_INSN (BB_END (dest
));
976 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
977 insn
= NEXT_INSN (insn
);
978 if (insn
!= end
&& INSN_LOCATOR (insn
)
979 && locator_eq (INSN_LOCATOR (insn
), (int) e
->goto_locus
))
983 if (!INSN_P (BB_END (nb
)))
984 BB_END (nb
) = emit_insn_after_noloc (gen_nop (), BB_END (nb
),
986 INSN_LOCATOR (BB_END (nb
)) = e
->goto_locus
;
988 /* If there are other incoming edges to the destination block
989 with the same goto locus, redirect them to the new block as
990 well, this can prevent other such blocks from being created
991 in subsequent iterations of the loop. */
992 for (ei2
= ei_start (dest
->preds
); (e2
= ei_safe_edge (ei2
)); )
994 && !(e2
->flags
& (EDGE_ABNORMAL
| EDGE_FALLTHRU
))
995 && locator_eq (e
->goto_locus
, e2
->goto_locus
))
996 redirect_edge_and_branch (e2
, nb
);
1003 /* Perform sanity checks on the insn chain.
1004 1. Check that next/prev pointers are consistent in both the forward and
1006 2. Count insns in chain, going both directions, and check if equal.
1007 3. Check that get_last_insn () returns the actual end of chain. */
1010 verify_insn_chain (void)
1012 rtx x
, prevx
, nextx
;
1013 int insn_cnt1
, insn_cnt2
;
1015 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
1017 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
1018 gcc_assert (PREV_INSN (x
) == prevx
);
1020 gcc_assert (prevx
== get_last_insn ());
1022 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
1024 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
1025 gcc_assert (NEXT_INSN (x
) == nextx
);
1027 gcc_assert (insn_cnt1
== insn_cnt2
);
1030 /* If we have assembler epilogues, the block falling through to exit must
1031 be the last one in the reordered chain when we reach final. Ensure
1032 that this condition is met. */
1034 fixup_fallthru_exit_predecessor (void)
1037 basic_block bb
= NULL
;
1039 /* This transformation is not valid before reload, because we might
1040 separate a call from the instruction that copies the return
1042 gcc_assert (reload_completed
);
1044 e
= find_fallthru_edge (EXIT_BLOCK_PTR
->preds
);
1050 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
1052 /* If the very first block is the one with the fall-through exit
1053 edge, we have to split that block. */
1056 bb
= split_block (bb
, NULL
)->dest
;
1059 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
1060 c
->il
.rtl
->footer
= NULL
;
1063 while (c
->aux
!= bb
)
1064 c
= (basic_block
) c
->aux
;
1068 c
= (basic_block
) c
->aux
;
1075 /* In case there are more than one fallthru predecessors of exit, force that
1076 there is only one. */
1079 force_one_exit_fallthru (void)
1081 edge e
, predecessor
= NULL
;
1084 basic_block forwarder
, bb
;
1086 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
1087 if (e
->flags
& EDGE_FALLTHRU
)
1089 if (predecessor
== NULL
)
1101 /* Exit has several fallthru predecessors. Create a forwarder block for
1103 forwarder
= split_edge (predecessor
);
1104 for (ei
= ei_start (EXIT_BLOCK_PTR
->preds
); (e
= ei_safe_edge (ei
)); )
1106 if (e
->src
== forwarder
1107 || !(e
->flags
& EDGE_FALLTHRU
))
1110 redirect_edge_and_branch_force (e
, forwarder
);
1113 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
1117 if (bb
->aux
== NULL
&& bb
!= forwarder
)
1119 bb
->aux
= forwarder
;
1125 /* Return true in case it is possible to duplicate the basic block BB. */
1127 /* We do not want to declare the function in a header file, since it should
1128 only be used through the cfghooks interface, and we do not want to move
1129 it to cfgrtl.c since it would require also moving quite a lot of related
1131 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block
);
1134 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
1136 /* Do not attempt to duplicate tablejumps, as we need to unshare
1137 the dispatch table. This is difficult to do, as the instructions
1138 computing jump destination may be hoisted outside the basic block. */
1139 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
1142 /* Do not duplicate blocks containing insns that can't be copied. */
1143 if (targetm
.cannot_copy_insn_p
)
1145 rtx insn
= BB_HEAD (bb
);
1148 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
1150 if (insn
== BB_END (bb
))
1152 insn
= NEXT_INSN (insn
);
1160 duplicate_insn_chain (rtx from
, rtx to
)
1162 rtx insn
, last
, copy
;
1164 /* Avoid updating of boundaries of previous basic block. The
1165 note will get removed from insn stream in fixup. */
1166 last
= emit_note (NOTE_INSN_DELETED
);
1168 /* Create copy at the end of INSN chain. The chain will
1169 be reordered later. */
1170 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1172 switch (GET_CODE (insn
))
1178 /* Avoid copying of dispatch tables. We never duplicate
1179 tablejumps, so this can hit only in case the table got
1180 moved far from original jump. */
1181 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1182 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1184 /* Avoid copying following barrier as well if any
1185 (and debug insns in between). */
1188 for (next
= NEXT_INSN (insn
);
1189 next
!= NEXT_INSN (to
);
1190 next
= NEXT_INSN (next
))
1191 if (!DEBUG_INSN_P (next
))
1193 if (next
!= NEXT_INSN (to
) && BARRIER_P (next
))
1197 copy
= emit_copy_of_insn_after (insn
, get_last_insn ());
1198 if (JUMP_P (insn
) && JUMP_LABEL (insn
) != NULL_RTX
1199 && ANY_RETURN_P (JUMP_LABEL (insn
)))
1200 JUMP_LABEL (copy
) = JUMP_LABEL (insn
);
1201 maybe_copy_prologue_epilogue_insn (insn
, copy
);
1212 switch (NOTE_KIND (insn
))
1214 /* In case prologue is empty and function contain label
1215 in first BB, we may want to copy the block. */
1216 case NOTE_INSN_PROLOGUE_END
:
1218 case NOTE_INSN_DELETED
:
1219 case NOTE_INSN_DELETED_LABEL
:
1220 /* No problem to strip these. */
1221 case NOTE_INSN_FUNCTION_BEG
:
1222 /* There is always just single entry to function. */
1223 case NOTE_INSN_BASIC_BLOCK
:
1226 case NOTE_INSN_EPILOGUE_BEG
:
1227 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1228 emit_note_copy (insn
);
1232 /* All other notes should have already been eliminated. */
1240 insn
= NEXT_INSN (last
);
1244 /* Create a duplicate of the basic block BB. */
1246 /* We do not want to declare the function in a header file, since it should
1247 only be used through the cfghooks interface, and we do not want to move
1248 it to cfgrtl.c since it would require also moving quite a lot of related
1250 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1253 cfg_layout_duplicate_bb (basic_block bb
)
1258 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1259 new_bb
= create_basic_block (insn
,
1260 insn
? get_last_insn () : NULL
,
1261 EXIT_BLOCK_PTR
->prev_bb
);
1263 BB_COPY_PARTITION (new_bb
, bb
);
1264 if (bb
->il
.rtl
->header
)
1266 insn
= bb
->il
.rtl
->header
;
1267 while (NEXT_INSN (insn
))
1268 insn
= NEXT_INSN (insn
);
1269 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1271 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1274 if (bb
->il
.rtl
->footer
)
1276 insn
= bb
->il
.rtl
->footer
;
1277 while (NEXT_INSN (insn
))
1278 insn
= NEXT_INSN (insn
);
1279 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1281 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1288 /* Main entry point to this module - initialize the datastructures for
1289 CFG layout changes. It keeps LOOPS up-to-date if not null.
1291 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1294 cfg_layout_initialize (unsigned int flags
)
1299 initialize_original_copy_tables ();
1301 cfg_layout_rtl_register_cfg_hooks ();
1303 record_effective_endpoints ();
1305 /* Make sure that the targets of non local gotos are marked. */
1306 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1308 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
1309 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
1312 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1315 /* Splits superblocks. */
1317 break_superblocks (void)
1319 sbitmap superblocks
;
1323 superblocks
= sbitmap_alloc (last_basic_block
);
1324 sbitmap_zero (superblocks
);
1327 if (bb
->flags
& BB_SUPERBLOCK
)
1329 bb
->flags
&= ~BB_SUPERBLOCK
;
1330 SET_BIT (superblocks
, bb
->index
);
1336 rebuild_jump_labels (get_insns ());
1337 find_many_sub_basic_blocks (superblocks
);
1343 /* Finalize the changes: reorder insn list according to the sequence specified
1344 by aux pointers, enter compensation code, rebuild scope forest. */
1347 cfg_layout_finalize (void)
1349 #ifdef ENABLE_CHECKING
1350 verify_flow_info ();
1352 force_one_exit_fallthru ();
1353 rtl_register_cfg_hooks ();
1354 if (reload_completed
1355 #ifdef HAVE_epilogue
1359 fixup_fallthru_exit_predecessor ();
1360 fixup_reorder_chain ();
1362 rebuild_jump_labels (get_insns ());
1363 delete_dead_jumptables ();
1365 #ifdef ENABLE_CHECKING
1366 verify_insn_chain ();
1367 verify_flow_info ();
1371 /* Checks whether all N blocks in BBS array can be copied. */
1373 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1379 for (i
= 0; i
< n
; i
++)
1380 bbs
[i
]->flags
|= BB_DUPLICATED
;
1382 for (i
= 0; i
< n
; i
++)
1384 /* In case we should redirect abnormal edge during duplication, fail. */
1386 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1387 if ((e
->flags
& EDGE_ABNORMAL
)
1388 && (e
->dest
->flags
& BB_DUPLICATED
))
1394 if (!can_duplicate_block_p (bbs
[i
]))
1402 for (i
= 0; i
< n
; i
++)
1403 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1408 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1409 are placed into array NEW_BBS in the same order. Edges from basic blocks
1410 in BBS are also duplicated and copies of those of them
1411 that lead into BBS are redirected to appropriate newly created block. The
1412 function assigns bbs into loops (copy of basic block bb is assigned to
1413 bb->loop_father->copy loop, so this must be set up correctly in advance)
1414 and updates dominators locally (LOOPS structure that contains the information
1415 about dominators is passed to enable this).
1417 BASE is the superloop to that basic block belongs; if its header or latch
1418 is copied, we do not set the new blocks as header or latch.
1420 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1421 also in the same order.
1423 Newly created basic blocks are put after the basic block AFTER in the
1424 instruction stream, and the order of the blocks in BBS array is preserved. */
1427 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1428 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1429 struct loop
*base
, basic_block after
)
1432 basic_block bb
, new_bb
, dom_bb
;
1435 /* Duplicate bbs, update dominators, assign bbs to loops. */
1436 for (i
= 0; i
< n
; i
++)
1440 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1442 bb
->flags
|= BB_DUPLICATED
;
1443 /* Possibly set loop header. */
1444 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1445 new_bb
->loop_father
->header
= new_bb
;
1447 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1448 new_bb
->loop_father
->latch
= new_bb
;
1451 /* Set dominators. */
1452 for (i
= 0; i
< n
; i
++)
1455 new_bb
= new_bbs
[i
];
1457 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1458 if (dom_bb
->flags
& BB_DUPLICATED
)
1460 dom_bb
= get_bb_copy (dom_bb
);
1461 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1465 /* Redirect edges. */
1466 for (j
= 0; j
< num_edges
; j
++)
1467 new_edges
[j
] = NULL
;
1468 for (i
= 0; i
< n
; i
++)
1471 new_bb
= new_bbs
[i
];
1474 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1476 for (j
= 0; j
< num_edges
; j
++)
1477 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1480 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1482 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1486 /* Clear information about duplicates. */
1487 for (i
= 0; i
< n
; i
++)
1488 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1491 #include "gt-cfglayout.h"