1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "insn-config.h"
34 #include "cfglayout.h"
38 #include "alloc-pool.h"
40 #include "tree-pass.h"
44 /* Holds the interesting trailing notes for the function. */
45 rtx cfg_layout_function_footer
;
46 rtx cfg_layout_function_header
;
48 static rtx
skip_insns_after_block (basic_block
);
49 static void record_effective_endpoints (void);
50 static rtx
label_for_bb (basic_block
);
51 static void fixup_reorder_chain (void);
53 static void change_scope (rtx
, tree
, tree
);
55 void verify_insn_chain (void);
56 static void fixup_fallthru_exit_predecessor (void);
57 static tree
insn_scope (rtx
);
60 unlink_insn_chain (rtx first
, rtx last
)
62 rtx prevfirst
= PREV_INSN (first
);
63 rtx nextlast
= NEXT_INSN (last
);
65 PREV_INSN (first
) = NULL
;
66 NEXT_INSN (last
) = NULL
;
68 NEXT_INSN (prevfirst
) = nextlast
;
70 PREV_INSN (nextlast
) = prevfirst
;
72 set_last_insn (prevfirst
);
74 set_first_insn (nextlast
);
78 /* Skip over inter-block insns occurring after BB which are typically
79 associated with BB (e.g., barriers). If there are any such insns,
80 we return the last one. Otherwise, we return the end of BB. */
83 skip_insns_after_block (basic_block bb
)
85 rtx insn
, last_insn
, next_head
, prev
;
88 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
89 next_head
= BB_HEAD (bb
->next_bb
);
91 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
93 if (insn
== next_head
)
96 switch (GET_CODE (insn
))
103 switch (NOTE_KIND (insn
))
105 case NOTE_INSN_BLOCK_END
:
116 && JUMP_P (NEXT_INSN (insn
))
117 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
118 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
120 insn
= NEXT_INSN (insn
);
133 /* It is possible to hit contradictory sequence. For instance:
139 Where barrier belongs to jump_insn, but the note does not. This can be
140 created by removing the basic block originally following
141 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
143 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
145 prev
= PREV_INSN (insn
);
147 switch (NOTE_KIND (insn
))
149 case NOTE_INSN_BLOCK_END
:
152 case NOTE_INSN_DELETED
:
153 case NOTE_INSN_DELETED_LABEL
:
156 reorder_insns (insn
, insn
, last_insn
);
163 /* Locate or create a label for a given basic block. */
166 label_for_bb (basic_block bb
)
168 rtx label
= BB_HEAD (bb
);
170 if (!LABEL_P (label
))
173 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
175 label
= block_label (bb
);
181 /* Locate the effective beginning and end of the insn chain for each
182 block, as defined by skip_insns_after_block above. */
185 record_effective_endpoints (void)
191 for (insn
= get_insns ();
194 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
195 insn
= NEXT_INSN (insn
))
197 /* No basic blocks at all? */
200 if (PREV_INSN (insn
))
201 cfg_layout_function_header
=
202 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
204 cfg_layout_function_header
= NULL_RTX
;
206 next_insn
= get_insns ();
211 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
212 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
213 PREV_INSN (BB_HEAD (bb
)));
214 end
= skip_insns_after_block (bb
);
215 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
216 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
217 next_insn
= NEXT_INSN (BB_END (bb
));
220 cfg_layout_function_footer
= next_insn
;
221 if (cfg_layout_function_footer
)
222 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
225 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
226 numbers and files. In order to be GGC friendly we need to use separate
227 varrays. This also slightly improve the memory locality in binary search.
228 The _locs array contains locators where the given property change. The
229 block_locators_blocks contains the scope block that is used for all insn
230 locator greater than corresponding block_locators_locs value and smaller
231 than the following one. Similarly for the other properties. */
232 static VEC(int,heap
) *block_locators_locs
;
233 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
234 static VEC(int,heap
) *locations_locators_locs
;
235 DEF_VEC_O(location_t
);
236 DEF_VEC_ALLOC_O(location_t
,heap
);
237 static VEC(location_t
,heap
) *locations_locators_vals
;
238 int prologue_locator
;
239 int epilogue_locator
;
241 /* Hold current location information and last location information, so the
242 datastructures are built lazily only when some instructions in given
244 location_t curr_location
, last_location
;
245 static tree curr_block
, last_block
;
246 static int curr_rtl_loc
= -1;
248 /* Allocate insn locator datastructure. */
250 insn_locators_alloc (void)
252 prologue_locator
= epilogue_locator
= 0;
254 block_locators_locs
= VEC_alloc (int, heap
, 32);
255 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
256 locations_locators_locs
= VEC_alloc (int, heap
, 32);
257 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
259 #ifdef USE_MAPPED_LOCATION
263 last_location
.line
= -1;
264 curr_location
.line
= -1;
271 /* At the end of emit stage, clear current location. */
273 insn_locators_finalize (void)
275 if (curr_rtl_loc
>= 0)
276 epilogue_locator
= curr_insn_locator ();
280 /* Set current location. */
282 set_curr_insn_source_location (location_t location
)
284 /* IV opts calls into RTL expansion to compute costs of operations. At this
285 time locators are not initialized. */
286 if (curr_rtl_loc
== -1)
288 #ifdef USE_MAPPED_LOCATION
289 if (location
== last_location
)
292 if (location
.file
&& last_location
.file
293 && !strcmp (location
.file
, last_location
.file
)
294 && location
.line
== last_location
.line
)
297 curr_location
= location
;
300 /* Set current scope block. */
302 set_curr_insn_block (tree b
)
304 /* IV opts calls into RTL expansion to compute costs of operations. At this
305 time locators are not initialized. */
306 if (curr_rtl_loc
== -1)
312 /* Return current insn locator. */
314 curr_insn_locator (void)
316 if (curr_rtl_loc
== -1)
318 if (last_block
!= curr_block
)
321 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
322 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
323 last_block
= curr_block
;
325 #ifdef USE_MAPPED_LOCATION
326 if (last_location
!= curr_location
)
328 if (last_location
.file
!= curr_location
.file
329 || last_location
.line
!= curr_location
.line
)
333 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
334 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
335 last_location
= curr_location
;
/* Pass entry point: switch the IL into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
348 outof_cfg_layout_mode (void)
353 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
354 bb
->aux
= bb
->next_bb
;
356 cfg_layout_finalize ();
361 struct tree_opt_pass pass_into_cfg_layout_mode
=
363 "into_cfglayout", /* name */
365 into_cfg_layout_mode
, /* execute */
368 0, /* static_pass_number */
370 0, /* properties_required */
371 0, /* properties_provided */
372 0, /* properties_destroyed */
373 0, /* todo_flags_start */
374 TODO_dump_func
, /* todo_flags_finish */
378 struct tree_opt_pass pass_outof_cfg_layout_mode
=
380 "outof_cfglayout", /* name */
382 outof_cfg_layout_mode
, /* execute */
385 0, /* static_pass_number */
387 0, /* properties_required */
388 0, /* properties_provided */
389 0, /* properties_destroyed */
390 0, /* todo_flags_start */
391 TODO_dump_func
, /* todo_flags_finish */
395 /* Return sope resulting from combination of S1 and S2. */
397 choose_inner_scope (tree s1
, tree s2
)
403 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
408 /* Emit lexical block notes needed to change scope from S1 to S2. */
411 change_scope (rtx orig_insn
, tree s1
, tree s2
)
413 rtx insn
= orig_insn
;
414 tree com
= NULL_TREE
;
415 tree ts1
= s1
, ts2
= s2
;
420 gcc_assert (ts1
&& ts2
);
421 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
422 ts1
= BLOCK_SUPERCONTEXT (ts1
);
423 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
424 ts2
= BLOCK_SUPERCONTEXT (ts2
);
427 ts1
= BLOCK_SUPERCONTEXT (ts1
);
428 ts2
= BLOCK_SUPERCONTEXT (ts2
);
437 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
438 NOTE_BLOCK (note
) = s
;
439 s
= BLOCK_SUPERCONTEXT (s
);
446 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
447 NOTE_BLOCK (insn
) = s
;
448 s
= BLOCK_SUPERCONTEXT (s
);
452 /* Return lexical scope block insn belong to. */
454 insn_scope (rtx insn
)
456 int max
= VEC_length (int, block_locators_locs
);
458 int loc
= INSN_LOCATOR (insn
);
460 /* When block_locators_locs was initialized, the pro- and epilogue
461 insns didn't exist yet and can therefore not be found this way.
462 But we know that they belong to the outer most block of the
464 Without this test, the prologue would be put inside the block of
465 the first valid instruction in the function and when that first
466 insn is part of an inlined function then the low_pc of that
467 inlined function is messed up. Likewise for the epilogue and
468 the last valid instruction. */
469 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
470 return DECL_INITIAL (cfun
->decl
);
476 int pos
= (min
+ max
) / 2;
477 int tmp
= VEC_index (int, block_locators_locs
, pos
);
479 if (tmp
<= loc
&& min
!= pos
)
481 else if (tmp
> loc
&& max
!= pos
)
489 return VEC_index (tree
, block_locators_blocks
, min
);
492 /* Return line number of the statement specified by the locator. */
494 locator_location (int loc
)
496 int max
= VEC_length (int, locations_locators_locs
);
501 int pos
= (min
+ max
) / 2;
502 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
504 if (tmp
<= loc
&& min
!= pos
)
506 else if (tmp
> loc
&& max
!= pos
)
514 return *VEC_index (location_t
, locations_locators_vals
, min
);
517 /* Return source line of the statement that produced this insn. */
519 locator_line (int loc
)
521 expanded_location xloc
;
525 xloc
= expand_location (locator_location (loc
));
529 /* Return line number of the statement that produced this insn. */
533 return locator_line (INSN_LOCATOR (insn
));
536 /* Return source file of the statement specified by LOC. */
538 locator_file (int loc
)
540 expanded_location xloc
;
544 xloc
= expand_location (locator_location (loc
));
548 /* Return source file of the statement that produced this insn. */
552 return locator_file (INSN_LOCATOR (insn
));
555 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
556 on the scope tree and the newly reordered instructions. */
559 reemit_insn_block_notes (void)
561 tree cur_block
= DECL_INITIAL (cfun
->decl
);
565 if (!active_insn_p (insn
))
566 insn
= next_active_insn (insn
);
567 for (; insn
; insn
= next_active_insn (insn
))
571 /* Avoid putting scope notes between jump table and its label. */
573 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
574 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
577 this_block
= insn_scope (insn
);
578 /* For sequences compute scope resulting from merging all scopes
579 of instructions nested inside. */
580 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
583 rtx body
= PATTERN (insn
);
586 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
587 this_block
= choose_inner_scope (this_block
,
588 insn_scope (XVECEXP (body
, 0, i
)));
593 if (this_block
!= cur_block
)
595 change_scope (insn
, cur_block
, this_block
);
596 cur_block
= this_block
;
600 /* change_scope emits before the insn, not after. */
601 note
= emit_note (NOTE_INSN_DELETED
);
602 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
609 /* Link the basic blocks in the correct order, compacting the basic
610 block queue while at it. This also clears the visited flag on
611 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
612 also clears the basic block header and footer fields.
614 This function is usually called after a pass (e.g. tracer) finishes
615 some transformations while in cfglayout mode. The required sequence
616 of the basic blocks is in a linked list along the bb->aux field.
617 This functions re-links the basic block prev_bb and next_bb pointers
618 accordingly, and it compacts and renumbers the blocks. */
621 relink_block_chain (bool stay_in_cfglayout_mode
)
623 basic_block bb
, prev_bb
;
626 /* Maybe dump the re-ordered sequence. */
629 fprintf (dump_file
, "Reordered sequence:\n");
630 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
632 bb
= (basic_block
) bb
->aux
, index
++)
634 fprintf (dump_file
, " %i ", index
);
635 if (get_bb_original (bb
))
636 fprintf (dump_file
, "duplicate of %i ",
637 get_bb_original (bb
)->index
);
638 else if (forwarder_block_p (bb
)
639 && !LABEL_P (BB_HEAD (bb
)))
640 fprintf (dump_file
, "compensation ");
642 fprintf (dump_file
, "bb %i ", bb
->index
);
643 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
647 /* Now reorder the blocks. */
648 prev_bb
= ENTRY_BLOCK_PTR
;
649 bb
= ENTRY_BLOCK_PTR
->next_bb
;
650 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
652 bb
->prev_bb
= prev_bb
;
653 prev_bb
->next_bb
= bb
;
655 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
656 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
658 /* Then, clean up the aux and visited fields. */
662 bb
->il
.rtl
->visited
= 0;
663 if (!stay_in_cfglayout_mode
)
664 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
667 /* Maybe reset the original copy tables, they are not valid anymore
668 when we renumber the basic blocks in compact_blocks. If we are
669 are going out of cfglayout mode, don't re-allocate the tables. */
670 free_original_copy_tables ();
671 if (stay_in_cfglayout_mode
)
672 initialize_original_copy_tables ();
674 /* Finally, put basic_block_info in the new order. */
679 /* Given a reorder chain, rearrange the code to match. */
682 fixup_reorder_chain (void)
687 if (cfg_layout_function_header
)
689 set_first_insn (cfg_layout_function_header
);
690 insn
= cfg_layout_function_header
;
691 while (NEXT_INSN (insn
))
692 insn
= NEXT_INSN (insn
);
695 /* First do the bulk reordering -- rechain the blocks without regard to
696 the needed changes to jumps and labels. */
698 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
700 if (bb
->il
.rtl
->header
)
703 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
705 set_first_insn (bb
->il
.rtl
->header
);
706 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
707 insn
= bb
->il
.rtl
->header
;
708 while (NEXT_INSN (insn
))
709 insn
= NEXT_INSN (insn
);
712 NEXT_INSN (insn
) = BB_HEAD (bb
);
714 set_first_insn (BB_HEAD (bb
));
715 PREV_INSN (BB_HEAD (bb
)) = insn
;
717 if (bb
->il
.rtl
->footer
)
719 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
720 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
721 while (NEXT_INSN (insn
))
722 insn
= NEXT_INSN (insn
);
726 NEXT_INSN (insn
) = cfg_layout_function_footer
;
727 if (cfg_layout_function_footer
)
728 PREV_INSN (cfg_layout_function_footer
) = insn
;
730 while (NEXT_INSN (insn
))
731 insn
= NEXT_INSN (insn
);
733 set_last_insn (insn
);
734 #ifdef ENABLE_CHECKING
735 verify_insn_chain ();
738 /* Now add jumps and labels as needed to match the blocks new
741 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
743 edge e_fall
, e_taken
, e
;
748 if (EDGE_COUNT (bb
->succs
) == 0)
751 /* Find the old fallthru edge, and another non-EH edge for
753 e_taken
= e_fall
= NULL
;
755 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
756 if (e
->flags
& EDGE_FALLTHRU
)
758 else if (! (e
->flags
& EDGE_EH
))
761 bb_end_insn
= BB_END (bb
);
762 if (JUMP_P (bb_end_insn
))
764 if (any_condjump_p (bb_end_insn
))
766 /* If the old fallthru is still next, nothing to do. */
767 if (bb
->aux
== e_fall
->dest
768 || e_fall
->dest
== EXIT_BLOCK_PTR
)
771 /* The degenerated case of conditional jump jumping to the next
772 instruction can happen for jumps with side effects. We need
773 to construct a forwarder block and this will be done just
774 fine by force_nonfallthru below. */
778 /* There is another special case: if *neither* block is next,
779 such as happens at the very end of a function, then we'll
780 need to add a new unconditional jump. Choose the taken
781 edge based on known or assumed probability. */
782 else if (bb
->aux
!= e_taken
->dest
)
784 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
787 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
788 && invert_jump (bb_end_insn
,
789 (e_fall
->dest
== EXIT_BLOCK_PTR
791 : label_for_bb (e_fall
->dest
)), 0))
793 e_fall
->flags
&= ~EDGE_FALLTHRU
;
794 #ifdef ENABLE_CHECKING
795 gcc_assert (could_fall_through
796 (e_taken
->src
, e_taken
->dest
));
798 e_taken
->flags
|= EDGE_FALLTHRU
;
799 update_br_prob_note (bb
);
800 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
804 /* If the "jumping" edge is a crossing edge, and the fall
805 through edge is non-crossing, leave things as they are. */
806 else if ((e_taken
->flags
& EDGE_CROSSING
)
807 && !(e_fall
->flags
& EDGE_CROSSING
))
810 /* Otherwise we can try to invert the jump. This will
811 basically never fail, however, keep up the pretense. */
812 else if (invert_jump (bb_end_insn
,
813 (e_fall
->dest
== EXIT_BLOCK_PTR
815 : label_for_bb (e_fall
->dest
)), 0))
817 e_fall
->flags
&= ~EDGE_FALLTHRU
;
818 #ifdef ENABLE_CHECKING
819 gcc_assert (could_fall_through
820 (e_taken
->src
, e_taken
->dest
));
822 e_taken
->flags
|= EDGE_FALLTHRU
;
823 update_br_prob_note (bb
);
829 /* Otherwise we have some return, switch or computed
830 jump. In the 99% case, there should not have been a
832 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
838 /* No fallthru implies a noreturn function with EH edges, or
839 something similarly bizarre. In any case, we don't need to
844 /* If the fallthru block is still next, nothing to do. */
845 if (bb
->aux
== e_fall
->dest
)
848 /* A fallthru to exit block. */
849 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
853 /* We got here if we need to add a new jump insn. */
854 nb
= force_nonfallthru (e_fall
);
857 nb
->il
.rtl
->visited
= 1;
860 /* Don't process this new block. */
863 /* Make sure new bb is tagged for correct section (same as
864 fall-thru source, since you cannot fall-throu across
865 section boundaries). */
866 BB_COPY_PARTITION (e_fall
->src
, single_pred (bb
));
867 if (flag_reorder_blocks_and_partition
868 && targetm
.have_named_sections
869 && JUMP_P (BB_END (bb
))
870 && !any_condjump_p (BB_END (bb
))
871 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
872 REG_NOTES (BB_END (bb
)) = gen_rtx_EXPR_LIST
873 (REG_CROSSING_JUMP
, NULL_RTX
, REG_NOTES (BB_END (bb
)));
877 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
879 /* Annoying special case - jump around dead jumptables left in the code. */
885 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
886 if (e
->flags
& EDGE_FALLTHRU
)
889 if (e
&& !can_fallthru (e
->src
, e
->dest
))
890 force_nonfallthru (e
);
894 /* Perform sanity checks on the insn chain.
895 1. Check that next/prev pointers are consistent in both the forward and
897 2. Count insns in chain, going both directions, and check if equal.
898 3. Check that get_last_insn () returns the actual end of chain. */
901 verify_insn_chain (void)
904 int insn_cnt1
, insn_cnt2
;
906 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
908 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
909 gcc_assert (PREV_INSN (x
) == prevx
);
911 gcc_assert (prevx
== get_last_insn ());
913 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
915 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
916 gcc_assert (NEXT_INSN (x
) == nextx
);
918 gcc_assert (insn_cnt1
== insn_cnt2
);
921 /* If we have assembler epilogues, the block falling through to exit must
922 be the last one in the reordered chain when we reach final. Ensure
923 that this condition is met. */
925 fixup_fallthru_exit_predecessor (void)
929 basic_block bb
= NULL
;
931 /* This transformation is not valid before reload, because we might
932 separate a call from the instruction that copies the return
934 gcc_assert (reload_completed
);
936 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
937 if (e
->flags
& EDGE_FALLTHRU
)
942 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
944 /* If the very first block is the one with the fall-through exit
945 edge, we have to split that block. */
948 bb
= split_block (bb
, NULL
)->dest
;
951 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
952 c
->il
.rtl
->footer
= NULL
;
956 c
= (basic_block
) c
->aux
;
960 c
= (basic_block
) c
->aux
;
967 /* Return true in case it is possible to duplicate the basic block BB. */
969 /* We do not want to declare the function in a header file, since it should
970 only be used through the cfghooks interface, and we do not want to move
971 it to cfgrtl.c since it would require also moving quite a lot of related
973 extern bool cfg_layout_can_duplicate_bb_p (basic_block
);
976 cfg_layout_can_duplicate_bb_p (basic_block bb
)
978 /* Do not attempt to duplicate tablejumps, as we need to unshare
979 the dispatch table. This is difficult to do, as the instructions
980 computing jump destination may be hoisted outside the basic block. */
981 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
984 /* Do not duplicate blocks containing insns that can't be copied. */
985 if (targetm
.cannot_copy_insn_p
)
987 rtx insn
= BB_HEAD (bb
);
990 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
992 if (insn
== BB_END (bb
))
994 insn
= NEXT_INSN (insn
);
1002 duplicate_insn_chain (rtx from
, rtx to
)
1006 /* Avoid updating of boundaries of previous basic block. The
1007 note will get removed from insn stream in fixup. */
1008 last
= emit_note (NOTE_INSN_DELETED
);
1010 /* Create copy at the end of INSN chain. The chain will
1011 be reordered later. */
1012 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1014 switch (GET_CODE (insn
))
1019 /* Avoid copying of dispatch tables. We never duplicate
1020 tablejumps, so this can hit only in case the table got
1021 moved far from original jump. */
1022 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1023 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1025 emit_copy_of_insn_after (insn
, get_last_insn ());
1036 switch (NOTE_KIND (insn
))
1038 /* In case prologue is empty and function contain label
1039 in first BB, we may want to copy the block. */
1040 case NOTE_INSN_PROLOGUE_END
:
1042 case NOTE_INSN_DELETED
:
1043 case NOTE_INSN_DELETED_LABEL
:
1044 /* No problem to strip these. */
1045 case NOTE_INSN_EPILOGUE_BEG
:
1046 /* Debug code expect these notes to exist just once.
1047 Keep them in the master copy.
1048 ??? It probably makes more sense to duplicate them for each
1050 case NOTE_INSN_FUNCTION_BEG
:
1051 /* There is always just single entry to function. */
1052 case NOTE_INSN_BASIC_BLOCK
:
1055 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1056 emit_note_copy (insn
);
1060 /* All other notes should have already been eliminated.
1069 insn
= NEXT_INSN (last
);
1073 /* Create a duplicate of the basic block BB. */
1075 /* We do not want to declare the function in a header file, since it should
1076 only be used through the cfghooks interface, and we do not want to move
1077 it to cfgrtl.c since it would require also moving quite a lot of related
1079 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1082 cfg_layout_duplicate_bb (basic_block bb
)
1087 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1088 new_bb
= create_basic_block (insn
,
1089 insn
? get_last_insn () : NULL
,
1090 EXIT_BLOCK_PTR
->prev_bb
);
1092 BB_COPY_PARTITION (new_bb
, bb
);
1093 if (bb
->il
.rtl
->header
)
1095 insn
= bb
->il
.rtl
->header
;
1096 while (NEXT_INSN (insn
))
1097 insn
= NEXT_INSN (insn
);
1098 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1100 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1103 if (bb
->il
.rtl
->footer
)
1105 insn
= bb
->il
.rtl
->footer
;
1106 while (NEXT_INSN (insn
))
1107 insn
= NEXT_INSN (insn
);
1108 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1110 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1117 /* Main entry point to this module - initialize the datastructures for
1118 CFG layout changes. It keeps LOOPS up-to-date if not null.
1120 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1123 cfg_layout_initialize (unsigned int flags
)
1128 initialize_original_copy_tables ();
1130 cfg_layout_rtl_register_cfg_hooks ();
1132 record_effective_endpoints ();
1134 /* Make sure that the targets of non local gotos are marked. */
1135 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1137 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
1138 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
1141 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1144 /* Splits superblocks. */
1146 break_superblocks (void)
1148 sbitmap superblocks
;
1152 superblocks
= sbitmap_alloc (last_basic_block
);
1153 sbitmap_zero (superblocks
);
1156 if (bb
->flags
& BB_SUPERBLOCK
)
1158 bb
->flags
&= ~BB_SUPERBLOCK
;
1159 SET_BIT (superblocks
, bb
->index
);
1165 rebuild_jump_labels (get_insns ());
1166 find_many_sub_basic_blocks (superblocks
);
1172 /* Finalize the changes: reorder insn list according to the sequence specified
1173 by aux pointers, enter compensation code, rebuild scope forest. */
1176 cfg_layout_finalize (void)
1178 #ifdef ENABLE_CHECKING
1179 verify_flow_info ();
1181 rtl_register_cfg_hooks ();
1182 if (reload_completed
1183 #ifdef HAVE_epilogue
1187 fixup_fallthru_exit_predecessor ();
1188 fixup_reorder_chain ();
1190 rebuild_jump_labels (get_insns ());
1191 delete_dead_jumptables ();
1193 #ifdef ENABLE_CHECKING
1194 verify_insn_chain ();
1195 verify_flow_info ();
1199 /* Checks whether all N blocks in BBS array can be copied. */
1201 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1207 for (i
= 0; i
< n
; i
++)
1208 bbs
[i
]->flags
|= BB_DUPLICATED
;
1210 for (i
= 0; i
< n
; i
++)
1212 /* In case we should redirect abnormal edge during duplication, fail. */
1214 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1215 if ((e
->flags
& EDGE_ABNORMAL
)
1216 && (e
->dest
->flags
& BB_DUPLICATED
))
1222 if (!can_duplicate_block_p (bbs
[i
]))
1230 for (i
= 0; i
< n
; i
++)
1231 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1236 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1237 are placed into array NEW_BBS in the same order. Edges from basic blocks
1238 in BBS are also duplicated and copies of those of them
1239 that lead into BBS are redirected to appropriate newly created block. The
1240 function assigns bbs into loops (copy of basic block bb is assigned to
1241 bb->loop_father->copy loop, so this must be set up correctly in advance)
1242 and updates dominators locally (LOOPS structure that contains the information
1243 about dominators is passed to enable this).
1245 BASE is the superloop to that basic block belongs; if its header or latch
1246 is copied, we do not set the new blocks as header or latch.
1248 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1249 also in the same order.
1251 Newly created basic blocks are put after the basic block AFTER in the
1252 instruction stream, and the order of the blocks in BBS array is preserved. */
1255 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1256 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1257 struct loop
*base
, basic_block after
)
1260 basic_block bb
, new_bb
, dom_bb
;
1263 /* Duplicate bbs, update dominators, assign bbs to loops. */
1264 for (i
= 0; i
< n
; i
++)
1268 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1270 bb
->flags
|= BB_DUPLICATED
;
1271 /* Possibly set loop header. */
1272 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1273 new_bb
->loop_father
->header
= new_bb
;
1275 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1276 new_bb
->loop_father
->latch
= new_bb
;
1279 /* Set dominators. */
1280 for (i
= 0; i
< n
; i
++)
1283 new_bb
= new_bbs
[i
];
1285 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1286 if (dom_bb
->flags
& BB_DUPLICATED
)
1288 dom_bb
= get_bb_copy (dom_bb
);
1289 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1293 /* Redirect edges. */
1294 for (j
= 0; j
< num_edges
; j
++)
1295 new_edges
[j
] = NULL
;
1296 for (i
= 0; i
< n
; i
++)
1299 new_bb
= new_bbs
[i
];
1302 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1304 for (j
= 0; j
< num_edges
; j
++)
1305 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1308 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1310 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1314 /* Clear information about duplicates. */
1315 for (i
= 0; i
< n
; i
++)
1316 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1319 #include "gt-cfglayout.h"