1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
33 #include "cfglayout.h"
37 #include "alloc-pool.h"
39 #include "tree-pass.h"
42 /* Holds the interesting trailing notes for the function. */
43 rtx cfg_layout_function_footer
;
44 rtx cfg_layout_function_header
;
46 static rtx
skip_insns_after_block (basic_block
);
47 static void record_effective_endpoints (void);
48 static rtx
label_for_bb (basic_block
);
49 static void fixup_reorder_chain (void);
51 static void change_scope (rtx
, tree
, tree
);
53 void verify_insn_chain (void);
54 static void fixup_fallthru_exit_predecessor (void);
55 static tree
insn_scope (rtx
);
58 unlink_insn_chain (rtx first
, rtx last
)
60 rtx prevfirst
= PREV_INSN (first
);
61 rtx nextlast
= NEXT_INSN (last
);
63 PREV_INSN (first
) = NULL
;
64 NEXT_INSN (last
) = NULL
;
66 NEXT_INSN (prevfirst
) = nextlast
;
68 PREV_INSN (nextlast
) = prevfirst
;
70 set_last_insn (prevfirst
);
72 set_first_insn (nextlast
);
76 /* Skip over inter-block insns occurring after BB which are typically
77 associated with BB (e.g., barriers). If there are any such insns,
78 we return the last one. Otherwise, we return the end of BB. */
81 skip_insns_after_block (basic_block bb
)
83 rtx insn
, last_insn
, next_head
, prev
;
86 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
87 next_head
= BB_HEAD (bb
->next_bb
);
89 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
91 if (insn
== next_head
)
94 switch (GET_CODE (insn
))
101 switch (NOTE_KIND (insn
))
103 case NOTE_INSN_BLOCK_END
:
114 && JUMP_P (NEXT_INSN (insn
))
115 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
116 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
118 insn
= NEXT_INSN (insn
);
131 /* It is possible to hit contradictory sequence. For instance:
137 Where barrier belongs to jump_insn, but the note does not. This can be
138 created by removing the basic block originally following
139 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
141 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
143 prev
= PREV_INSN (insn
);
145 switch (NOTE_KIND (insn
))
147 case NOTE_INSN_BLOCK_END
:
150 case NOTE_INSN_DELETED
:
151 case NOTE_INSN_DELETED_LABEL
:
154 reorder_insns (insn
, insn
, last_insn
);
161 /* Locate or create a label for a given basic block. */
164 label_for_bb (basic_block bb
)
166 rtx label
= BB_HEAD (bb
);
168 if (!LABEL_P (label
))
171 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
173 label
= block_label (bb
);
179 /* Locate the effective beginning and end of the insn chain for each
180 block, as defined by skip_insns_after_block above. */
183 record_effective_endpoints (void)
189 for (insn
= get_insns ();
192 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
193 insn
= NEXT_INSN (insn
))
195 /* No basic blocks at all? */
198 if (PREV_INSN (insn
))
199 cfg_layout_function_header
=
200 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
202 cfg_layout_function_header
= NULL_RTX
;
204 next_insn
= get_insns ();
209 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
210 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
211 PREV_INSN (BB_HEAD (bb
)));
212 end
= skip_insns_after_block (bb
);
213 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
214 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
215 next_insn
= NEXT_INSN (BB_END (bb
));
218 cfg_layout_function_footer
= next_insn
;
219 if (cfg_layout_function_footer
)
220 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
223 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
224 numbers and files. In order to be GGC friendly we need to use separate
225 varrays. This also slightly improve the memory locality in binary search.
226 The _locs array contains locators where the given property change. The
227 block_locators_blocks contains the scope block that is used for all insn
228 locator greater than corresponding block_locators_locs value and smaller
229 than the following one. Similarly for the other properties. */
230 static VEC(int,heap
) *block_locators_locs
;
231 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
232 static VEC(int,heap
) *locations_locators_locs
;
233 DEF_VEC_O(location_t
);
234 DEF_VEC_ALLOC_O(location_t
,heap
);
235 static VEC(location_t
,heap
) *locations_locators_vals
;
236 int prologue_locator
;
237 int epilogue_locator
;
239 /* Hold current location information and last location information, so the
240 datastructures are built lazilly only when some instructions in given
242 location_t curr_location
, last_location
;
243 static tree curr_block
, last_block
;
244 static int curr_rtl_loc
= -1;
246 /* Allocate insn locator datastructure. */
248 insn_locators_alloc (void)
250 prologue_locator
= epilogue_locator
= 0;
252 block_locators_locs
= VEC_alloc (int, heap
, 32);
253 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
254 locations_locators_locs
= VEC_alloc (int, heap
, 32);
255 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
257 #ifdef USE_MAPPED_LOCATION
261 last_location
.line
= -1;
262 curr_location
.line
= -1;
269 /* At the end of emit stage, clear current location. */
271 insn_locators_finalize (void)
273 if (curr_rtl_loc
>= 0)
274 epilogue_locator
= curr_insn_locator ();
278 /* Set current location. */
280 set_curr_insn_source_location (location_t location
)
282 /* IV opts calls into RTL expansion to compute costs of operations. At this
283 time locators are not initialized. */
284 if (curr_rtl_loc
== -1)
286 #ifdef USE_MAPPED_LOCATION
287 if (location
== last_location
)
290 if (location
.file
&& last_location
.file
291 && !strcmp (location
.file
, last_location
.file
)
292 && location
.line
== last_location
.line
)
295 curr_location
= location
;
298 /* Set current scope block. */
300 set_curr_insn_block (tree b
)
302 /* IV opts calls into RTL expansion to compute costs of operations. At this
303 time locators are not initialized. */
304 if (curr_rtl_loc
== -1)
310 /* Return current insn locator. */
312 curr_insn_locator (void)
314 if (curr_rtl_loc
== -1)
316 if (last_block
!= curr_block
)
319 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
320 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
321 last_block
= curr_block
;
323 #ifdef USE_MAPPED_LOCATION
324 if (last_location
!= curr_location
)
326 if (last_location
.file
!= curr_location
.file
327 || last_location
.line
!= curr_location
.line
)
331 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
332 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
333 last_location
= curr_location
;
/* Pass execute hook: switch the current function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
346 outof_cfg_layout_mode (void)
351 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
352 bb
->aux
= bb
->next_bb
;
354 cfg_layout_finalize ();
359 struct tree_opt_pass pass_into_cfg_layout_mode
=
361 "into_cfglayout", /* name */
363 into_cfg_layout_mode
, /* execute */
366 0, /* static_pass_number */
368 0, /* properties_required */
369 0, /* properties_provided */
370 0, /* properties_destroyed */
371 0, /* todo_flags_start */
372 TODO_dump_func
, /* todo_flags_finish */
376 struct tree_opt_pass pass_outof_cfg_layout_mode
=
378 "outof_cfglayout", /* name */
380 outof_cfg_layout_mode
, /* execute */
383 0, /* static_pass_number */
385 0, /* properties_required */
386 0, /* properties_provided */
387 0, /* properties_destroyed */
388 0, /* todo_flags_start */
389 TODO_dump_func
, /* todo_flags_finish */
393 /* Return sope resulting from combination of S1 and S2. */
395 choose_inner_scope (tree s1
, tree s2
)
401 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
406 /* Emit lexical block notes needed to change scope from S1 to S2. */
409 change_scope (rtx orig_insn
, tree s1
, tree s2
)
411 rtx insn
= orig_insn
;
412 tree com
= NULL_TREE
;
413 tree ts1
= s1
, ts2
= s2
;
418 gcc_assert (ts1
&& ts2
);
419 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
420 ts1
= BLOCK_SUPERCONTEXT (ts1
);
421 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
422 ts2
= BLOCK_SUPERCONTEXT (ts2
);
425 ts1
= BLOCK_SUPERCONTEXT (ts1
);
426 ts2
= BLOCK_SUPERCONTEXT (ts2
);
435 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
436 NOTE_BLOCK (note
) = s
;
437 s
= BLOCK_SUPERCONTEXT (s
);
444 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
445 NOTE_BLOCK (insn
) = s
;
446 s
= BLOCK_SUPERCONTEXT (s
);
450 /* Return lexical scope block insn belong to. */
452 insn_scope (rtx insn
)
454 int max
= VEC_length (int, block_locators_locs
);
456 int loc
= INSN_LOCATOR (insn
);
458 /* When block_locators_locs was initialized, the pro- and epilogue
459 insns didn't exist yet and can therefore not be found this way.
460 But we know that they belong to the outer most block of the
462 Without this test, the prologue would be put inside the block of
463 the first valid instruction in the function and when that first
464 insn is part of an inlined function then the low_pc of that
465 inlined function is messed up. Likewise for the epilogue and
466 the last valid instruction. */
467 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
468 return DECL_INITIAL (cfun
->decl
);
474 int pos
= (min
+ max
) / 2;
475 int tmp
= VEC_index (int, block_locators_locs
, pos
);
477 if (tmp
<= loc
&& min
!= pos
)
479 else if (tmp
> loc
&& max
!= pos
)
487 return VEC_index (tree
, block_locators_blocks
, min
);
490 /* Return line number of the statement specified by the locator. */
492 locator_location (int loc
)
494 int max
= VEC_length (int, locations_locators_locs
);
499 int pos
= (min
+ max
) / 2;
500 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
502 if (tmp
<= loc
&& min
!= pos
)
504 else if (tmp
> loc
&& max
!= pos
)
512 return *VEC_index (location_t
, locations_locators_vals
, min
);
515 /* Return source line of the statement that produced this insn. */
517 locator_line (int loc
)
519 expanded_location xloc
;
523 xloc
= expand_location (locator_location (loc
));
527 /* Return line number of the statement that produced this insn. */
531 return locator_line (INSN_LOCATOR (insn
));
534 /* Return source file of the statement specified by LOC. */
536 locator_file (int loc
)
538 expanded_location xloc
;
542 xloc
= expand_location (locator_location (loc
));
546 /* Return source file of the statement that produced this insn. */
550 return locator_file (INSN_LOCATOR (insn
));
553 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
554 on the scope tree and the newly reordered instructions. */
557 reemit_insn_block_notes (void)
559 tree cur_block
= DECL_INITIAL (cfun
->decl
);
563 if (!active_insn_p (insn
))
564 insn
= next_active_insn (insn
);
565 for (; insn
; insn
= next_active_insn (insn
))
569 /* Avoid putting scope notes between jump table and its label. */
571 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
572 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
575 this_block
= insn_scope (insn
);
576 /* For sequences compute scope resulting from merging all scopes
577 of instructions nested inside. */
578 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
581 rtx body
= PATTERN (insn
);
584 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
585 this_block
= choose_inner_scope (this_block
,
586 insn_scope (XVECEXP (body
, 0, i
)));
591 if (this_block
!= cur_block
)
593 change_scope (insn
, cur_block
, this_block
);
594 cur_block
= this_block
;
598 /* change_scope emits before the insn, not after. */
599 note
= emit_note (NOTE_INSN_DELETED
);
600 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
607 /* Link the basic blocks in the correct order, compacting the basic
608 block queue while at it. This also clears the visited flag on
609 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
610 also clears the basic block header and footer fields.
612 This function is usually called after a pass (e.g. tracer) finishes
613 some transformations while in cfglayout mode. The required sequence
614 of the basic blocks is in a linked list along the bb->aux field.
615 This functions re-links the basic block prev_bb and next_bb pointers
616 accordingly, and it compacts and renumbers the blocks. */
619 relink_block_chain (bool stay_in_cfglayout_mode
)
621 basic_block bb
, prev_bb
;
624 /* Maybe dump the re-ordered sequence. */
627 fprintf (dump_file
, "Reordered sequence:\n");
628 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
630 bb
= bb
->aux
, index
++)
632 fprintf (dump_file
, " %i ", index
);
633 if (get_bb_original (bb
))
634 fprintf (dump_file
, "duplicate of %i ",
635 get_bb_original (bb
)->index
);
636 else if (forwarder_block_p (bb
)
637 && !LABEL_P (BB_HEAD (bb
)))
638 fprintf (dump_file
, "compensation ");
640 fprintf (dump_file
, "bb %i ", bb
->index
);
641 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
645 /* Now reorder the blocks. */
646 prev_bb
= ENTRY_BLOCK_PTR
;
647 bb
= ENTRY_BLOCK_PTR
->next_bb
;
648 for (; bb
; prev_bb
= bb
, bb
= bb
->aux
)
650 bb
->prev_bb
= prev_bb
;
651 prev_bb
->next_bb
= bb
;
653 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
654 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
656 /* Then, clean up the aux and visited fields. */
660 bb
->il
.rtl
->visited
= 0;
661 if (!stay_in_cfglayout_mode
)
662 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
665 /* Maybe reset the original copy tables, they are not valid anymore
666 when we renumber the basic blocks in compact_blocks. If we are
667 are going out of cfglayout mode, don't re-allocate the tables. */
668 free_original_copy_tables ();
669 if (stay_in_cfglayout_mode
)
670 initialize_original_copy_tables ();
672 /* Finally, put basic_block_info in the new order. */
677 /* Given a reorder chain, rearrange the code to match. */
680 fixup_reorder_chain (void)
685 if (cfg_layout_function_header
)
687 set_first_insn (cfg_layout_function_header
);
688 insn
= cfg_layout_function_header
;
689 while (NEXT_INSN (insn
))
690 insn
= NEXT_INSN (insn
);
693 /* First do the bulk reordering -- rechain the blocks without regard to
694 the needed changes to jumps and labels. */
696 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= bb
->aux
)
698 if (bb
->il
.rtl
->header
)
701 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
703 set_first_insn (bb
->il
.rtl
->header
);
704 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
705 insn
= bb
->il
.rtl
->header
;
706 while (NEXT_INSN (insn
))
707 insn
= NEXT_INSN (insn
);
710 NEXT_INSN (insn
) = BB_HEAD (bb
);
712 set_first_insn (BB_HEAD (bb
));
713 PREV_INSN (BB_HEAD (bb
)) = insn
;
715 if (bb
->il
.rtl
->footer
)
717 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
718 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
719 while (NEXT_INSN (insn
))
720 insn
= NEXT_INSN (insn
);
724 NEXT_INSN (insn
) = cfg_layout_function_footer
;
725 if (cfg_layout_function_footer
)
726 PREV_INSN (cfg_layout_function_footer
) = insn
;
728 while (NEXT_INSN (insn
))
729 insn
= NEXT_INSN (insn
);
731 set_last_insn (insn
);
732 #ifdef ENABLE_CHECKING
733 verify_insn_chain ();
736 /* Now add jumps and labels as needed to match the blocks new
739 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= bb
->aux
)
741 edge e_fall
, e_taken
, e
;
746 if (EDGE_COUNT (bb
->succs
) == 0)
749 /* Find the old fallthru edge, and another non-EH edge for
751 e_taken
= e_fall
= NULL
;
753 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
754 if (e
->flags
& EDGE_FALLTHRU
)
756 else if (! (e
->flags
& EDGE_EH
))
759 bb_end_insn
= BB_END (bb
);
760 if (JUMP_P (bb_end_insn
))
762 if (any_condjump_p (bb_end_insn
))
764 /* If the old fallthru is still next, nothing to do. */
765 if (bb
->aux
== e_fall
->dest
766 || e_fall
->dest
== EXIT_BLOCK_PTR
)
769 /* The degenerated case of conditional jump jumping to the next
770 instruction can happen for jumps with side effects. We need
771 to construct a forwarder block and this will be done just
772 fine by force_nonfallthru below. */
776 /* There is another special case: if *neither* block is next,
777 such as happens at the very end of a function, then we'll
778 need to add a new unconditional jump. Choose the taken
779 edge based on known or assumed probability. */
780 else if (bb
->aux
!= e_taken
->dest
)
782 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
785 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
786 && invert_jump (bb_end_insn
,
787 (e_fall
->dest
== EXIT_BLOCK_PTR
789 : label_for_bb (e_fall
->dest
)), 0))
791 e_fall
->flags
&= ~EDGE_FALLTHRU
;
792 #ifdef ENABLE_CHECKING
793 gcc_assert (could_fall_through
794 (e_taken
->src
, e_taken
->dest
));
796 e_taken
->flags
|= EDGE_FALLTHRU
;
797 update_br_prob_note (bb
);
798 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
802 /* If the "jumping" edge is a crossing edge, and the fall
803 through edge is non-crossing, leave things as they are. */
804 else if ((e_taken
->flags
& EDGE_CROSSING
)
805 && !(e_fall
->flags
& EDGE_CROSSING
))
808 /* Otherwise we can try to invert the jump. This will
809 basically never fail, however, keep up the pretense. */
810 else if (invert_jump (bb_end_insn
,
811 (e_fall
->dest
== EXIT_BLOCK_PTR
813 : label_for_bb (e_fall
->dest
)), 0))
815 e_fall
->flags
&= ~EDGE_FALLTHRU
;
816 #ifdef ENABLE_CHECKING
817 gcc_assert (could_fall_through
818 (e_taken
->src
, e_taken
->dest
));
820 e_taken
->flags
|= EDGE_FALLTHRU
;
821 update_br_prob_note (bb
);
827 /* Otherwise we have some return, switch or computed
828 jump. In the 99% case, there should not have been a
830 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
836 /* No fallthru implies a noreturn function with EH edges, or
837 something similarly bizarre. In any case, we don't need to
842 /* If the fallthru block is still next, nothing to do. */
843 if (bb
->aux
== e_fall
->dest
)
846 /* A fallthru to exit block. */
847 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
851 /* We got here if we need to add a new jump insn. */
852 nb
= force_nonfallthru (e_fall
);
855 nb
->il
.rtl
->visited
= 1;
858 /* Don't process this new block. */
861 /* Make sure new bb is tagged for correct section (same as
862 fall-thru source, since you cannot fall-throu across
863 section boundaries). */
864 BB_COPY_PARTITION (e_fall
->src
, single_pred (bb
));
865 if (flag_reorder_blocks_and_partition
866 && targetm
.have_named_sections
867 && JUMP_P (BB_END (bb
))
868 && !any_condjump_p (BB_END (bb
))
869 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
870 REG_NOTES (BB_END (bb
)) = gen_rtx_EXPR_LIST
871 (REG_CROSSING_JUMP
, NULL_RTX
, REG_NOTES (BB_END (bb
)));
875 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
877 /* Annoying special case - jump around dead jumptables left in the code. */
883 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
884 if (e
->flags
& EDGE_FALLTHRU
)
887 if (e
&& !can_fallthru (e
->src
, e
->dest
))
888 force_nonfallthru (e
);
892 /* Perform sanity checks on the insn chain.
893 1. Check that next/prev pointers are consistent in both the forward and
895 2. Count insns in chain, going both directions, and check if equal.
896 3. Check that get_last_insn () returns the actual end of chain. */
899 verify_insn_chain (void)
902 int insn_cnt1
, insn_cnt2
;
904 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
906 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
907 gcc_assert (PREV_INSN (x
) == prevx
);
909 gcc_assert (prevx
== get_last_insn ());
911 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
913 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
914 gcc_assert (NEXT_INSN (x
) == nextx
);
916 gcc_assert (insn_cnt1
== insn_cnt2
);
919 /* If we have assembler epilogues, the block falling through to exit must
920 be the last one in the reordered chain when we reach final. Ensure
921 that this condition is met. */
923 fixup_fallthru_exit_predecessor (void)
927 basic_block bb
= NULL
;
929 /* This transformation is not valid before reload, because we might
930 separate a call from the instruction that copies the return
932 gcc_assert (reload_completed
);
934 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
935 if (e
->flags
& EDGE_FALLTHRU
)
940 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
942 /* If the very first block is the one with the fall-through exit
943 edge, we have to split that block. */
946 bb
= split_block (bb
, NULL
)->dest
;
949 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
950 c
->il
.rtl
->footer
= NULL
;
965 /* Return true in case it is possible to duplicate the basic block BB. */
967 /* We do not want to declare the function in a header file, since it should
968 only be used through the cfghooks interface, and we do not want to move
969 it to cfgrtl.c since it would require also moving quite a lot of related
971 extern bool cfg_layout_can_duplicate_bb_p (basic_block
);
974 cfg_layout_can_duplicate_bb_p (basic_block bb
)
976 /* Do not attempt to duplicate tablejumps, as we need to unshare
977 the dispatch table. This is difficult to do, as the instructions
978 computing jump destination may be hoisted outside the basic block. */
979 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
982 /* Do not duplicate blocks containing insns that can't be copied. */
983 if (targetm
.cannot_copy_insn_p
)
985 rtx insn
= BB_HEAD (bb
);
988 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
990 if (insn
== BB_END (bb
))
992 insn
= NEXT_INSN (insn
);
1000 duplicate_insn_chain (rtx from
, rtx to
)
1004 /* Avoid updating of boundaries of previous basic block. The
1005 note will get removed from insn stream in fixup. */
1006 last
= emit_note (NOTE_INSN_DELETED
);
1008 /* Create copy at the end of INSN chain. The chain will
1009 be reordered later. */
1010 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1012 switch (GET_CODE (insn
))
1017 /* Avoid copying of dispatch tables. We never duplicate
1018 tablejumps, so this can hit only in case the table got
1019 moved far from original jump. */
1020 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1021 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1023 emit_copy_of_insn_after (insn
, get_last_insn ());
1034 switch (NOTE_KIND (insn
))
1036 /* In case prologue is empty and function contain label
1037 in first BB, we may want to copy the block. */
1038 case NOTE_INSN_PROLOGUE_END
:
1040 case NOTE_INSN_DELETED
:
1041 case NOTE_INSN_DELETED_LABEL
:
1042 /* No problem to strip these. */
1043 case NOTE_INSN_EPILOGUE_BEG
:
1044 /* Debug code expect these notes to exist just once.
1045 Keep them in the master copy.
1046 ??? It probably makes more sense to duplicate them for each
1048 case NOTE_INSN_FUNCTION_BEG
:
1049 /* There is always just single entry to function. */
1050 case NOTE_INSN_BASIC_BLOCK
:
1053 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1054 emit_note_copy (insn
);
1058 /* All other notes should have already been eliminated.
1067 insn
= NEXT_INSN (last
);
1071 /* Create a duplicate of the basic block BB. */
1073 /* We do not want to declare the function in a header file, since it should
1074 only be used through the cfghooks interface, and we do not want to move
1075 it to cfgrtl.c since it would require also moving quite a lot of related
1077 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1080 cfg_layout_duplicate_bb (basic_block bb
)
1085 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1086 new_bb
= create_basic_block (insn
,
1087 insn
? get_last_insn () : NULL
,
1088 EXIT_BLOCK_PTR
->prev_bb
);
1090 BB_COPY_PARTITION (new_bb
, bb
);
1091 if (bb
->il
.rtl
->header
)
1093 insn
= bb
->il
.rtl
->header
;
1094 while (NEXT_INSN (insn
))
1095 insn
= NEXT_INSN (insn
);
1096 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1098 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1101 if (bb
->il
.rtl
->footer
)
1103 insn
= bb
->il
.rtl
->footer
;
1104 while (NEXT_INSN (insn
))
1105 insn
= NEXT_INSN (insn
);
1106 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1108 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1111 if (bb
->il
.rtl
->global_live_at_start
)
1113 new_bb
->il
.rtl
->global_live_at_start
= ALLOC_REG_SET (®_obstack
);
1114 new_bb
->il
.rtl
->global_live_at_end
= ALLOC_REG_SET (®_obstack
);
1115 COPY_REG_SET (new_bb
->il
.rtl
->global_live_at_start
,
1116 bb
->il
.rtl
->global_live_at_start
);
1117 COPY_REG_SET (new_bb
->il
.rtl
->global_live_at_end
,
1118 bb
->il
.rtl
->global_live_at_end
);
1124 /* Main entry point to this module - initialize the datastructures for
1125 CFG layout changes. It keeps LOOPS up-to-date if not null.
1127 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1128 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1132 cfg_layout_initialize (unsigned int flags
)
1134 initialize_original_copy_tables ();
1136 cfg_layout_rtl_register_cfg_hooks ();
1138 record_effective_endpoints ();
1140 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1143 /* Splits superblocks. */
1145 break_superblocks (void)
1147 sbitmap superblocks
;
1151 superblocks
= sbitmap_alloc (last_basic_block
);
1152 sbitmap_zero (superblocks
);
1155 if (bb
->flags
& BB_SUPERBLOCK
)
1157 bb
->flags
&= ~BB_SUPERBLOCK
;
1158 SET_BIT (superblocks
, bb
->index
);
1164 rebuild_jump_labels (get_insns ());
1165 find_many_sub_basic_blocks (superblocks
);
1171 /* Finalize the changes: reorder insn list according to the sequence specified
1172 by aux pointers, enter compensation code, rebuild scope forest. */
1175 cfg_layout_finalize (void)
1177 #ifdef ENABLE_CHECKING
1178 verify_flow_info ();
1180 rtl_register_cfg_hooks ();
1181 if (reload_completed
1182 #ifdef HAVE_epilogue
1186 fixup_fallthru_exit_predecessor ();
1187 fixup_reorder_chain ();
1189 rebuild_jump_labels (get_insns ());
1190 delete_dead_jumptables ();
1192 #ifdef ENABLE_CHECKING
1193 verify_insn_chain ();
1194 verify_flow_info ();
1198 /* Checks whether all N blocks in BBS array can be copied. */
1200 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1206 for (i
= 0; i
< n
; i
++)
1207 bbs
[i
]->flags
|= BB_DUPLICATED
;
1209 for (i
= 0; i
< n
; i
++)
1211 /* In case we should redirect abnormal edge during duplication, fail. */
1213 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1214 if ((e
->flags
& EDGE_ABNORMAL
)
1215 && (e
->dest
->flags
& BB_DUPLICATED
))
1221 if (!can_duplicate_block_p (bbs
[i
]))
1229 for (i
= 0; i
< n
; i
++)
1230 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1235 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1236 are placed into array NEW_BBS in the same order. Edges from basic blocks
1237 in BBS are also duplicated and copies of those of them
1238 that lead into BBS are redirected to appropriate newly created block. The
1239 function assigns bbs into loops (copy of basic block bb is assigned to
1240 bb->loop_father->copy loop, so this must be set up correctly in advance)
1241 and updates dominators locally (LOOPS structure that contains the information
1242 about dominators is passed to enable this).
1244 BASE is the superloop to that basic block belongs; if its header or latch
1245 is copied, we do not set the new blocks as header or latch.
1247 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1248 also in the same order.
1250 Newly created basic blocks are put after the basic block AFTER in the
1251 instruction stream, and the order of the blocks in BBS array is preserved. */
1254 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1255 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1256 struct loop
*base
, basic_block after
)
1259 basic_block bb
, new_bb
, dom_bb
;
1262 /* Duplicate bbs, update dominators, assign bbs to loops. */
1263 for (i
= 0; i
< n
; i
++)
1267 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1269 bb
->flags
|= BB_DUPLICATED
;
1270 /* Possibly set loop header. */
1271 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1272 new_bb
->loop_father
->header
= new_bb
;
1274 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1275 new_bb
->loop_father
->latch
= new_bb
;
1278 /* Set dominators. */
1279 for (i
= 0; i
< n
; i
++)
1282 new_bb
= new_bbs
[i
];
1284 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1285 if (dom_bb
->flags
& BB_DUPLICATED
)
1287 dom_bb
= get_bb_copy (dom_bb
);
1288 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1292 /* Redirect edges. */
1293 for (j
= 0; j
< num_edges
; j
++)
1294 new_edges
[j
] = NULL
;
1295 for (i
= 0; i
< n
; i
++)
1298 new_bb
= new_bbs
[i
];
1301 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1303 for (j
= 0; j
< num_edges
; j
++)
1304 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1307 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1309 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1313 /* Clear information about duplicates. */
1314 for (i
= 0; i
< n
; i
++)
1315 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1318 #include "gt-cfglayout.h"