/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer
;
45 rtx cfg_layout_function_header
;
47 static rtx
skip_insns_after_block (basic_block
);
48 static void record_effective_endpoints (void);
49 static rtx
label_for_bb (basic_block
);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx
, tree
, tree
);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree
insn_scope (const_rtx
);
59 unlink_insn_chain (rtx first
, rtx last
)
61 rtx prevfirst
= PREV_INSN (first
);
62 rtx nextlast
= NEXT_INSN (last
);
64 PREV_INSN (first
) = NULL
;
65 NEXT_INSN (last
) = NULL
;
67 NEXT_INSN (prevfirst
) = nextlast
;
69 PREV_INSN (nextlast
) = prevfirst
;
71 set_last_insn (prevfirst
);
73 set_first_insn (nextlast
);
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
82 skip_insns_after_block (basic_block bb
)
84 rtx insn
, last_insn
, next_head
, prev
;
87 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
88 next_head
= BB_HEAD (bb
->next_bb
);
90 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
92 if (insn
== next_head
)
95 switch (GET_CODE (insn
))
102 switch (NOTE_KIND (insn
))
104 case NOTE_INSN_BLOCK_END
:
115 && JUMP_P (NEXT_INSN (insn
))
116 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
117 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
119 insn
= NEXT_INSN (insn
);
132 /* It is possible to hit contradictory sequence. For instance:
138 Where barrier belongs to jump_insn, but the note does not. This can be
139 created by removing the basic block originally following
140 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
142 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
144 prev
= PREV_INSN (insn
);
146 switch (NOTE_KIND (insn
))
148 case NOTE_INSN_BLOCK_END
:
151 case NOTE_INSN_DELETED
:
152 case NOTE_INSN_DELETED_LABEL
:
155 reorder_insns (insn
, insn
, last_insn
);
162 /* Locate or create a label for a given basic block. */
165 label_for_bb (basic_block bb
)
167 rtx label
= BB_HEAD (bb
);
169 if (!LABEL_P (label
))
172 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
174 label
= block_label (bb
);
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
184 record_effective_endpoints (void)
190 for (insn
= get_insns ();
193 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
194 insn
= NEXT_INSN (insn
))
196 /* No basic blocks at all? */
199 if (PREV_INSN (insn
))
200 cfg_layout_function_header
=
201 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
203 cfg_layout_function_header
= NULL_RTX
;
205 next_insn
= get_insns ();
210 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
211 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
212 PREV_INSN (BB_HEAD (bb
)));
213 end
= skip_insns_after_block (bb
);
214 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
215 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
216 next_insn
= NEXT_INSN (BB_END (bb
));
219 cfg_layout_function_footer
= next_insn
;
220 if (cfg_layout_function_footer
)
221 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
224 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
225 numbers and files. In order to be GGC friendly we need to use separate
226 varrays. This also slightly improve the memory locality in binary search.
227 The _locs array contains locators where the given property change. The
228 block_locators_blocks contains the scope block that is used for all insn
229 locator greater than corresponding block_locators_locs value and smaller
230 than the following one. Similarly for the other properties. */
231 static VEC(int,heap
) *block_locators_locs
;
232 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
233 static VEC(int,heap
) *locations_locators_locs
;
234 DEF_VEC_O(location_t
);
235 DEF_VEC_ALLOC_O(location_t
,heap
);
236 static VEC(location_t
,heap
) *locations_locators_vals
;
237 int prologue_locator
;
238 int epilogue_locator
;
240 /* Hold current location information and last location information, so the
241 datastructures are built lazily only when some instructions in given
243 location_t curr_location
, last_location
;
244 static tree curr_block
, last_block
;
245 static int curr_rtl_loc
= -1;
247 /* Allocate insn locator datastructure. */
249 insn_locators_alloc (void)
251 prologue_locator
= epilogue_locator
= 0;
253 block_locators_locs
= VEC_alloc (int, heap
, 32);
254 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
255 locations_locators_locs
= VEC_alloc (int, heap
, 32);
256 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
265 /* At the end of emit stage, clear current location. */
267 insn_locators_finalize (void)
269 if (curr_rtl_loc
>= 0)
270 epilogue_locator
= curr_insn_locator ();
274 /* Set current location. */
276 set_curr_insn_source_location (location_t location
)
278 /* IV opts calls into RTL expansion to compute costs of operations. At this
279 time locators are not initialized. */
280 if (curr_rtl_loc
== -1)
282 if (location
== last_location
)
284 curr_location
= location
;
287 /* Set current scope block. */
289 set_curr_insn_block (tree b
)
291 /* IV opts calls into RTL expansion to compute costs of operations. At this
292 time locators are not initialized. */
293 if (curr_rtl_loc
== -1)
299 /* Return current insn locator. */
301 curr_insn_locator (void)
303 if (curr_rtl_loc
== -1)
305 if (last_block
!= curr_block
)
308 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
309 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
310 last_block
= curr_block
;
312 if (last_location
!= curr_location
)
315 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
316 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
317 last_location
= curr_location
;
/* Pass entry point: switch the RTL IL into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
330 outof_cfg_layout_mode (void)
335 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
336 bb
->aux
= bb
->next_bb
;
338 cfg_layout_finalize ();
343 struct rtl_opt_pass pass_into_cfg_layout_mode
=
347 "into_cfglayout", /* name */
349 into_cfg_layout_mode
, /* execute */
352 0, /* static_pass_number */
354 0, /* properties_required */
355 0, /* properties_provided */
356 0, /* properties_destroyed */
357 0, /* todo_flags_start */
358 TODO_dump_func
, /* todo_flags_finish */
362 struct rtl_opt_pass pass_outof_cfg_layout_mode
=
366 "outof_cfglayout", /* name */
368 outof_cfg_layout_mode
, /* execute */
371 0, /* static_pass_number */
373 0, /* properties_required */
374 0, /* properties_provided */
375 0, /* properties_destroyed */
376 0, /* todo_flags_start */
377 TODO_dump_func
, /* todo_flags_finish */
381 /* Return scope resulting from combination of S1 and S2. */
383 choose_inner_scope (tree s1
, tree s2
)
389 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
394 /* Emit lexical block notes needed to change scope from S1 to S2. */
397 change_scope (rtx orig_insn
, tree s1
, tree s2
)
399 rtx insn
= orig_insn
;
400 tree com
= NULL_TREE
;
401 tree ts1
= s1
, ts2
= s2
;
406 gcc_assert (ts1
&& ts2
);
407 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
408 ts1
= BLOCK_SUPERCONTEXT (ts1
);
409 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
410 ts2
= BLOCK_SUPERCONTEXT (ts2
);
413 ts1
= BLOCK_SUPERCONTEXT (ts1
);
414 ts2
= BLOCK_SUPERCONTEXT (ts2
);
423 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
424 NOTE_BLOCK (note
) = s
;
425 s
= BLOCK_SUPERCONTEXT (s
);
432 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
433 NOTE_BLOCK (insn
) = s
;
434 s
= BLOCK_SUPERCONTEXT (s
);
438 /* Return lexical scope block insn belong to. */
440 insn_scope (const_rtx insn
)
442 int max
= VEC_length (int, block_locators_locs
);
444 int loc
= INSN_LOCATOR (insn
);
446 /* When block_locators_locs was initialized, the pro- and epilogue
447 insns didn't exist yet and can therefore not be found this way.
448 But we know that they belong to the outer most block of the
450 Without this test, the prologue would be put inside the block of
451 the first valid instruction in the function and when that first
452 insn is part of an inlined function then the low_pc of that
453 inlined function is messed up. Likewise for the epilogue and
454 the last valid instruction. */
455 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
456 return DECL_INITIAL (cfun
->decl
);
462 int pos
= (min
+ max
) / 2;
463 int tmp
= VEC_index (int, block_locators_locs
, pos
);
465 if (tmp
<= loc
&& min
!= pos
)
467 else if (tmp
> loc
&& max
!= pos
)
475 return VEC_index (tree
, block_locators_blocks
, min
);
478 /* Return line number of the statement specified by the locator. */
480 locator_location (int loc
)
482 int max
= VEC_length (int, locations_locators_locs
);
487 int pos
= (min
+ max
) / 2;
488 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
490 if (tmp
<= loc
&& min
!= pos
)
492 else if (tmp
> loc
&& max
!= pos
)
500 return *VEC_index (location_t
, locations_locators_vals
, min
);
503 /* Return source line of the statement that produced this insn. */
505 locator_line (int loc
)
507 expanded_location xloc
;
511 xloc
= expand_location (locator_location (loc
));
515 /* Return line number of the statement that produced this insn. */
517 insn_line (const_rtx insn
)
519 return locator_line (INSN_LOCATOR (insn
));
522 /* Return source file of the statement specified by LOC. */
524 locator_file (int loc
)
526 expanded_location xloc
;
530 xloc
= expand_location (locator_location (loc
));
534 /* Return source file of the statement that produced this insn. */
536 insn_file (const_rtx insn
)
538 return locator_file (INSN_LOCATOR (insn
));
541 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
542 on the scope tree and the newly reordered instructions. */
545 reemit_insn_block_notes (void)
547 tree cur_block
= DECL_INITIAL (cfun
->decl
);
551 if (!active_insn_p (insn
))
552 insn
= next_active_insn (insn
);
553 for (; insn
; insn
= next_active_insn (insn
))
557 /* Avoid putting scope notes between jump table and its label. */
559 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
560 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
563 this_block
= insn_scope (insn
);
564 /* For sequences compute scope resulting from merging all scopes
565 of instructions nested inside. */
566 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
569 rtx body
= PATTERN (insn
);
572 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
573 this_block
= choose_inner_scope (this_block
,
574 insn_scope (XVECEXP (body
, 0, i
)));
579 if (this_block
!= cur_block
)
581 change_scope (insn
, cur_block
, this_block
);
582 cur_block
= this_block
;
586 /* change_scope emits before the insn, not after. */
587 note
= emit_note (NOTE_INSN_DELETED
);
588 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
595 /* Link the basic blocks in the correct order, compacting the basic
596 block queue while at it. This also clears the visited flag on
597 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
598 also clears the basic block header and footer fields.
600 This function is usually called after a pass (e.g. tracer) finishes
601 some transformations while in cfglayout mode. The required sequence
602 of the basic blocks is in a linked list along the bb->aux field.
603 This functions re-links the basic block prev_bb and next_bb pointers
604 accordingly, and it compacts and renumbers the blocks. */
607 relink_block_chain (bool stay_in_cfglayout_mode
)
609 basic_block bb
, prev_bb
;
612 /* Maybe dump the re-ordered sequence. */
615 fprintf (dump_file
, "Reordered sequence:\n");
616 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
618 bb
= (basic_block
) bb
->aux
, index
++)
620 fprintf (dump_file
, " %i ", index
);
621 if (get_bb_original (bb
))
622 fprintf (dump_file
, "duplicate of %i ",
623 get_bb_original (bb
)->index
);
624 else if (forwarder_block_p (bb
)
625 && !LABEL_P (BB_HEAD (bb
)))
626 fprintf (dump_file
, "compensation ");
628 fprintf (dump_file
, "bb %i ", bb
->index
);
629 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
633 /* Now reorder the blocks. */
634 prev_bb
= ENTRY_BLOCK_PTR
;
635 bb
= ENTRY_BLOCK_PTR
->next_bb
;
636 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
638 bb
->prev_bb
= prev_bb
;
639 prev_bb
->next_bb
= bb
;
641 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
642 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
644 /* Then, clean up the aux and visited fields. */
648 bb
->il
.rtl
->visited
= 0;
649 if (!stay_in_cfglayout_mode
)
650 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
653 /* Maybe reset the original copy tables, they are not valid anymore
654 when we renumber the basic blocks in compact_blocks. If we are
655 are going out of cfglayout mode, don't re-allocate the tables. */
656 free_original_copy_tables ();
657 if (stay_in_cfglayout_mode
)
658 initialize_original_copy_tables ();
660 /* Finally, put basic_block_info in the new order. */
665 /* Given a reorder chain, rearrange the code to match. */
668 fixup_reorder_chain (void)
673 if (cfg_layout_function_header
)
675 set_first_insn (cfg_layout_function_header
);
676 insn
= cfg_layout_function_header
;
677 while (NEXT_INSN (insn
))
678 insn
= NEXT_INSN (insn
);
681 /* First do the bulk reordering -- rechain the blocks without regard to
682 the needed changes to jumps and labels. */
684 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
686 if (bb
->il
.rtl
->header
)
689 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
691 set_first_insn (bb
->il
.rtl
->header
);
692 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
693 insn
= bb
->il
.rtl
->header
;
694 while (NEXT_INSN (insn
))
695 insn
= NEXT_INSN (insn
);
698 NEXT_INSN (insn
) = BB_HEAD (bb
);
700 set_first_insn (BB_HEAD (bb
));
701 PREV_INSN (BB_HEAD (bb
)) = insn
;
703 if (bb
->il
.rtl
->footer
)
705 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
706 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
707 while (NEXT_INSN (insn
))
708 insn
= NEXT_INSN (insn
);
712 NEXT_INSN (insn
) = cfg_layout_function_footer
;
713 if (cfg_layout_function_footer
)
714 PREV_INSN (cfg_layout_function_footer
) = insn
;
716 while (NEXT_INSN (insn
))
717 insn
= NEXT_INSN (insn
);
719 set_last_insn (insn
);
720 #ifdef ENABLE_CHECKING
721 verify_insn_chain ();
724 /* Now add jumps and labels as needed to match the blocks new
727 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
729 edge e_fall
, e_taken
, e
;
734 if (EDGE_COUNT (bb
->succs
) == 0)
737 /* Find the old fallthru edge, and another non-EH edge for
739 e_taken
= e_fall
= NULL
;
741 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
742 if (e
->flags
& EDGE_FALLTHRU
)
744 else if (! (e
->flags
& EDGE_EH
))
747 bb_end_insn
= BB_END (bb
);
748 if (JUMP_P (bb_end_insn
))
750 if (any_condjump_p (bb_end_insn
))
752 /* If the old fallthru is still next, nothing to do. */
753 if (bb
->aux
== e_fall
->dest
754 || e_fall
->dest
== EXIT_BLOCK_PTR
)
757 /* The degenerated case of conditional jump jumping to the next
758 instruction can happen for jumps with side effects. We need
759 to construct a forwarder block and this will be done just
760 fine by force_nonfallthru below. */
764 /* There is another special case: if *neither* block is next,
765 such as happens at the very end of a function, then we'll
766 need to add a new unconditional jump. Choose the taken
767 edge based on known or assumed probability. */
768 else if (bb
->aux
!= e_taken
->dest
)
770 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
773 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
774 && invert_jump (bb_end_insn
,
775 (e_fall
->dest
== EXIT_BLOCK_PTR
777 : label_for_bb (e_fall
->dest
)), 0))
779 e_fall
->flags
&= ~EDGE_FALLTHRU
;
780 #ifdef ENABLE_CHECKING
781 gcc_assert (could_fall_through
782 (e_taken
->src
, e_taken
->dest
));
784 e_taken
->flags
|= EDGE_FALLTHRU
;
785 update_br_prob_note (bb
);
786 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
790 /* If the "jumping" edge is a crossing edge, and the fall
791 through edge is non-crossing, leave things as they are. */
792 else if ((e_taken
->flags
& EDGE_CROSSING
)
793 && !(e_fall
->flags
& EDGE_CROSSING
))
796 /* Otherwise we can try to invert the jump. This will
797 basically never fail, however, keep up the pretense. */
798 else if (invert_jump (bb_end_insn
,
799 (e_fall
->dest
== EXIT_BLOCK_PTR
801 : label_for_bb (e_fall
->dest
)), 0))
803 e_fall
->flags
&= ~EDGE_FALLTHRU
;
804 #ifdef ENABLE_CHECKING
805 gcc_assert (could_fall_through
806 (e_taken
->src
, e_taken
->dest
));
808 e_taken
->flags
|= EDGE_FALLTHRU
;
809 update_br_prob_note (bb
);
815 /* Otherwise we have some return, switch or computed
816 jump. In the 99% case, there should not have been a
818 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
824 /* No fallthru implies a noreturn function with EH edges, or
825 something similarly bizarre. In any case, we don't need to
830 /* If the fallthru block is still next, nothing to do. */
831 if (bb
->aux
== e_fall
->dest
)
834 /* A fallthru to exit block. */
835 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
839 /* We got here if we need to add a new jump insn. */
840 nb
= force_nonfallthru (e_fall
);
843 nb
->il
.rtl
->visited
= 1;
846 /* Don't process this new block. */
849 /* Make sure new bb is tagged for correct section (same as
850 fall-thru source, since you cannot fall-throu across
851 section boundaries). */
852 BB_COPY_PARTITION (e_fall
->src
, single_pred (bb
));
853 if (flag_reorder_blocks_and_partition
854 && targetm
.have_named_sections
855 && JUMP_P (BB_END (bb
))
856 && !any_condjump_p (BB_END (bb
))
857 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
858 add_reg_note (BB_END (bb
), REG_CROSSING_JUMP
, NULL_RTX
);
862 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
864 /* Annoying special case - jump around dead jumptables left in the code. */
870 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
871 if (e
->flags
& EDGE_FALLTHRU
)
874 if (e
&& !can_fallthru (e
->src
, e
->dest
))
875 force_nonfallthru (e
);
879 /* Perform sanity checks on the insn chain.
880 1. Check that next/prev pointers are consistent in both the forward and
882 2. Count insns in chain, going both directions, and check if equal.
883 3. Check that get_last_insn () returns the actual end of chain. */
886 verify_insn_chain (void)
889 int insn_cnt1
, insn_cnt2
;
891 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
893 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
894 gcc_assert (PREV_INSN (x
) == prevx
);
896 gcc_assert (prevx
== get_last_insn ());
898 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
900 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
901 gcc_assert (NEXT_INSN (x
) == nextx
);
903 gcc_assert (insn_cnt1
== insn_cnt2
);
906 /* If we have assembler epilogues, the block falling through to exit must
907 be the last one in the reordered chain when we reach final. Ensure
908 that this condition is met. */
910 fixup_fallthru_exit_predecessor (void)
914 basic_block bb
= NULL
;
916 /* This transformation is not valid before reload, because we might
917 separate a call from the instruction that copies the return
919 gcc_assert (reload_completed
);
921 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
922 if (e
->flags
& EDGE_FALLTHRU
)
927 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
929 /* If the very first block is the one with the fall-through exit
930 edge, we have to split that block. */
933 bb
= split_block (bb
, NULL
)->dest
;
936 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
937 c
->il
.rtl
->footer
= NULL
;
941 c
= (basic_block
) c
->aux
;
945 c
= (basic_block
) c
->aux
;
952 /* In case there are more than one fallthru predecessors of exit, force that
953 there is only one. */
956 force_one_exit_fallthru (void)
958 edge e
, predecessor
= NULL
;
961 basic_block forwarder
, bb
;
963 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
964 if (e
->flags
& EDGE_FALLTHRU
)
966 if (predecessor
== NULL
)
978 /* Exit has several fallthru predecessors. Create a forwarder block for
980 forwarder
= split_edge (predecessor
);
981 for (ei
= ei_start (EXIT_BLOCK_PTR
->preds
); (e
= ei_safe_edge (ei
)); )
983 if (e
->src
== forwarder
984 || !(e
->flags
& EDGE_FALLTHRU
))
987 redirect_edge_and_branch_force (e
, forwarder
);
990 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
994 if (bb
->aux
== NULL
&& bb
!= forwarder
)
1002 /* Return true in case it is possible to duplicate the basic block BB. */
1004 /* We do not want to declare the function in a header file, since it should
1005 only be used through the cfghooks interface, and we do not want to move
1006 it to cfgrtl.c since it would require also moving quite a lot of related
1008 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block
);
1011 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
1013 /* Do not attempt to duplicate tablejumps, as we need to unshare
1014 the dispatch table. This is difficult to do, as the instructions
1015 computing jump destination may be hoisted outside the basic block. */
1016 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
1019 /* Do not duplicate blocks containing insns that can't be copied. */
1020 if (targetm
.cannot_copy_insn_p
)
1022 rtx insn
= BB_HEAD (bb
);
1025 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
1027 if (insn
== BB_END (bb
))
1029 insn
= NEXT_INSN (insn
);
1037 duplicate_insn_chain (rtx from
, rtx to
)
1041 /* Avoid updating of boundaries of previous basic block. The
1042 note will get removed from insn stream in fixup. */
1043 last
= emit_note (NOTE_INSN_DELETED
);
1045 /* Create copy at the end of INSN chain. The chain will
1046 be reordered later. */
1047 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1049 switch (GET_CODE (insn
))
1054 /* Avoid copying of dispatch tables. We never duplicate
1055 tablejumps, so this can hit only in case the table got
1056 moved far from original jump. */
1057 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1058 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1060 emit_copy_of_insn_after (insn
, get_last_insn ());
1071 switch (NOTE_KIND (insn
))
1073 /* In case prologue is empty and function contain label
1074 in first BB, we may want to copy the block. */
1075 case NOTE_INSN_PROLOGUE_END
:
1077 case NOTE_INSN_DELETED
:
1078 case NOTE_INSN_DELETED_LABEL
:
1079 /* No problem to strip these. */
1080 case NOTE_INSN_EPILOGUE_BEG
:
1081 /* Debug code expect these notes to exist just once.
1082 Keep them in the master copy.
1083 ??? It probably makes more sense to duplicate them for each
1085 case NOTE_INSN_FUNCTION_BEG
:
1086 /* There is always just single entry to function. */
1087 case NOTE_INSN_BASIC_BLOCK
:
1090 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1091 emit_note_copy (insn
);
1095 /* All other notes should have already been eliminated.
1104 insn
= NEXT_INSN (last
);
1108 /* Create a duplicate of the basic block BB. */
1110 /* We do not want to declare the function in a header file, since it should
1111 only be used through the cfghooks interface, and we do not want to move
1112 it to cfgrtl.c since it would require also moving quite a lot of related
1114 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1117 cfg_layout_duplicate_bb (basic_block bb
)
1122 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1123 new_bb
= create_basic_block (insn
,
1124 insn
? get_last_insn () : NULL
,
1125 EXIT_BLOCK_PTR
->prev_bb
);
1127 BB_COPY_PARTITION (new_bb
, bb
);
1128 if (bb
->il
.rtl
->header
)
1130 insn
= bb
->il
.rtl
->header
;
1131 while (NEXT_INSN (insn
))
1132 insn
= NEXT_INSN (insn
);
1133 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1135 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1138 if (bb
->il
.rtl
->footer
)
1140 insn
= bb
->il
.rtl
->footer
;
1141 while (NEXT_INSN (insn
))
1142 insn
= NEXT_INSN (insn
);
1143 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1145 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1152 /* Main entry point to this module - initialize the datastructures for
1153 CFG layout changes. It keeps LOOPS up-to-date if not null.
1155 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1158 cfg_layout_initialize (unsigned int flags
)
1163 initialize_original_copy_tables ();
1165 cfg_layout_rtl_register_cfg_hooks ();
1167 record_effective_endpoints ();
1169 /* Make sure that the targets of non local gotos are marked. */
1170 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1172 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
1173 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
1176 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1179 /* Splits superblocks. */
1181 break_superblocks (void)
1183 sbitmap superblocks
;
1187 superblocks
= sbitmap_alloc (last_basic_block
);
1188 sbitmap_zero (superblocks
);
1191 if (bb
->flags
& BB_SUPERBLOCK
)
1193 bb
->flags
&= ~BB_SUPERBLOCK
;
1194 SET_BIT (superblocks
, bb
->index
);
1200 rebuild_jump_labels (get_insns ());
1201 find_many_sub_basic_blocks (superblocks
);
1207 /* Finalize the changes: reorder insn list according to the sequence specified
1208 by aux pointers, enter compensation code, rebuild scope forest. */
1211 cfg_layout_finalize (void)
1213 #ifdef ENABLE_CHECKING
1214 verify_flow_info ();
1216 force_one_exit_fallthru ();
1217 rtl_register_cfg_hooks ();
1218 if (reload_completed
1219 #ifdef HAVE_epilogue
1223 fixup_fallthru_exit_predecessor ();
1224 fixup_reorder_chain ();
1226 rebuild_jump_labels (get_insns ());
1227 delete_dead_jumptables ();
1229 #ifdef ENABLE_CHECKING
1230 verify_insn_chain ();
1231 verify_flow_info ();
1235 /* Checks whether all N blocks in BBS array can be copied. */
1237 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1243 for (i
= 0; i
< n
; i
++)
1244 bbs
[i
]->flags
|= BB_DUPLICATED
;
1246 for (i
= 0; i
< n
; i
++)
1248 /* In case we should redirect abnormal edge during duplication, fail. */
1250 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1251 if ((e
->flags
& EDGE_ABNORMAL
)
1252 && (e
->dest
->flags
& BB_DUPLICATED
))
1258 if (!can_duplicate_block_p (bbs
[i
]))
1266 for (i
= 0; i
< n
; i
++)
1267 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1272 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1273 are placed into array NEW_BBS in the same order. Edges from basic blocks
1274 in BBS are also duplicated and copies of those of them
1275 that lead into BBS are redirected to appropriate newly created block. The
1276 function assigns bbs into loops (copy of basic block bb is assigned to
1277 bb->loop_father->copy loop, so this must be set up correctly in advance)
1278 and updates dominators locally (LOOPS structure that contains the information
1279 about dominators is passed to enable this).
1281 BASE is the superloop to that basic block belongs; if its header or latch
1282 is copied, we do not set the new blocks as header or latch.
1284 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1285 also in the same order.
1287 Newly created basic blocks are put after the basic block AFTER in the
1288 instruction stream, and the order of the blocks in BBS array is preserved. */
1291 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1292 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1293 struct loop
*base
, basic_block after
)
1296 basic_block bb
, new_bb
, dom_bb
;
1299 /* Duplicate bbs, update dominators, assign bbs to loops. */
1300 for (i
= 0; i
< n
; i
++)
1304 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1306 bb
->flags
|= BB_DUPLICATED
;
1307 /* Possibly set loop header. */
1308 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1309 new_bb
->loop_father
->header
= new_bb
;
1311 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1312 new_bb
->loop_father
->latch
= new_bb
;
1315 /* Set dominators. */
1316 for (i
= 0; i
< n
; i
++)
1319 new_bb
= new_bbs
[i
];
1321 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1322 if (dom_bb
->flags
& BB_DUPLICATED
)
1324 dom_bb
= get_bb_copy (dom_bb
);
1325 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1329 /* Redirect edges. */
1330 for (j
= 0; j
< num_edges
; j
++)
1331 new_edges
[j
] = NULL
;
1332 for (i
= 0; i
< n
; i
++)
1335 new_bb
= new_bbs
[i
];
1338 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1340 for (j
= 0; j
< num_edges
; j
++)
1341 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1344 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1346 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1350 /* Clear information about duplicates. */
1351 for (i
= 0; i
< n
; i
++)
1352 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1355 #include "gt-cfglayout.h"