/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer
;
45 rtx cfg_layout_function_header
;
47 static rtx
skip_insns_after_block (basic_block
);
48 static void record_effective_endpoints (void);
49 static rtx
label_for_bb (basic_block
);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx
, tree
, tree
);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree
insn_scope (const_rtx
);
59 unlink_insn_chain (rtx first
, rtx last
)
61 rtx prevfirst
= PREV_INSN (first
);
62 rtx nextlast
= NEXT_INSN (last
);
64 PREV_INSN (first
) = NULL
;
65 NEXT_INSN (last
) = NULL
;
67 NEXT_INSN (prevfirst
) = nextlast
;
69 PREV_INSN (nextlast
) = prevfirst
;
71 set_last_insn (prevfirst
);
73 set_first_insn (nextlast
);
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
82 skip_insns_after_block (basic_block bb
)
84 rtx insn
, last_insn
, next_head
, prev
;
87 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
88 next_head
= BB_HEAD (bb
->next_bb
);
90 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
92 if (insn
== next_head
)
95 switch (GET_CODE (insn
))
102 switch (NOTE_KIND (insn
))
104 case NOTE_INSN_BLOCK_END
:
115 && JUMP_P (NEXT_INSN (insn
))
116 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
117 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
119 insn
= NEXT_INSN (insn
);
132 /* It is possible to hit contradictory sequence. For instance:
138 Where barrier belongs to jump_insn, but the note does not. This can be
139 created by removing the basic block originally following
140 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
142 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
144 prev
= PREV_INSN (insn
);
146 switch (NOTE_KIND (insn
))
148 case NOTE_INSN_BLOCK_END
:
151 case NOTE_INSN_DELETED
:
152 case NOTE_INSN_DELETED_LABEL
:
155 reorder_insns (insn
, insn
, last_insn
);
162 /* Locate or create a label for a given basic block. */
165 label_for_bb (basic_block bb
)
167 rtx label
= BB_HEAD (bb
);
169 if (!LABEL_P (label
))
172 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
174 label
= block_label (bb
);
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
184 record_effective_endpoints (void)
190 for (insn
= get_insns ();
193 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
194 insn
= NEXT_INSN (insn
))
196 /* No basic blocks at all? */
199 if (PREV_INSN (insn
))
200 cfg_layout_function_header
=
201 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
203 cfg_layout_function_header
= NULL_RTX
;
205 next_insn
= get_insns ();
210 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
211 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
212 PREV_INSN (BB_HEAD (bb
)));
213 end
= skip_insns_after_block (bb
);
214 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
215 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
216 next_insn
= NEXT_INSN (BB_END (bb
));
219 cfg_layout_function_footer
= next_insn
;
220 if (cfg_layout_function_footer
)
221 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
224 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
225 numbers and files. In order to be GGC friendly we need to use separate
226 varrays. This also slightly improve the memory locality in binary search.
227 The _locs array contains locators where the given property change. The
228 block_locators_blocks contains the scope block that is used for all insn
229 locator greater than corresponding block_locators_locs value and smaller
230 than the following one. Similarly for the other properties. */
231 static VEC(int,heap
) *block_locators_locs
;
232 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
233 static VEC(int,heap
) *locations_locators_locs
;
234 DEF_VEC_O(location_t
);
235 DEF_VEC_ALLOC_O(location_t
,heap
);
236 static VEC(location_t
,heap
) *locations_locators_vals
;
237 int prologue_locator
;
238 int epilogue_locator
;
/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   function are seen.  */
243 location_t curr_location
, last_location
;
244 static tree curr_block
, last_block
;
245 static int curr_rtl_loc
= -1;
247 /* Allocate insn locator datastructure. */
249 insn_locators_alloc (void)
251 prologue_locator
= epilogue_locator
= 0;
253 block_locators_locs
= VEC_alloc (int, heap
, 32);
254 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
255 locations_locators_locs
= VEC_alloc (int, heap
, 32);
256 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
258 #ifdef USE_MAPPED_LOCATION
262 last_location
.line
= -1;
263 curr_location
.line
= -1;
270 /* At the end of emit stage, clear current location. */
272 insn_locators_finalize (void)
274 if (curr_rtl_loc
>= 0)
275 epilogue_locator
= curr_insn_locator ();
279 /* Set current location. */
281 set_curr_insn_source_location (location_t location
)
283 /* IV opts calls into RTL expansion to compute costs of operations. At this
284 time locators are not initialized. */
285 if (curr_rtl_loc
== -1)
287 #ifdef USE_MAPPED_LOCATION
288 if (location
== last_location
)
291 if (location
.file
&& last_location
.file
292 && !strcmp (location
.file
, last_location
.file
)
293 && location
.line
== last_location
.line
)
296 curr_location
= location
;
299 /* Set current scope block. */
301 set_curr_insn_block (tree b
)
303 /* IV opts calls into RTL expansion to compute costs of operations. At this
304 time locators are not initialized. */
305 if (curr_rtl_loc
== -1)
311 /* Return current insn locator. */
313 curr_insn_locator (void)
315 if (curr_rtl_loc
== -1)
317 if (last_block
!= curr_block
)
320 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
321 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
322 last_block
= curr_block
;
324 #ifdef USE_MAPPED_LOCATION
325 if (last_location
!= curr_location
)
327 if (last_location
.file
!= curr_location
.file
328 || last_location
.line
!= curr_location
.line
)
332 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
333 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
334 last_location
= curr_location
;
340 into_cfg_layout_mode (void)
342 cfg_layout_initialize (0);
347 outof_cfg_layout_mode (void)
352 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
353 bb
->aux
= bb
->next_bb
;
355 cfg_layout_finalize ();
360 struct tree_opt_pass pass_into_cfg_layout_mode
=
362 "into_cfglayout", /* name */
364 into_cfg_layout_mode
, /* execute */
367 0, /* static_pass_number */
369 0, /* properties_required */
370 0, /* properties_provided */
371 0, /* properties_destroyed */
372 0, /* todo_flags_start */
373 TODO_dump_func
, /* todo_flags_finish */
377 struct tree_opt_pass pass_outof_cfg_layout_mode
=
379 "outof_cfglayout", /* name */
381 outof_cfg_layout_mode
, /* execute */
384 0, /* static_pass_number */
386 0, /* properties_required */
387 0, /* properties_provided */
388 0, /* properties_destroyed */
389 0, /* todo_flags_start */
390 TODO_dump_func
, /* todo_flags_finish */
394 /* Return sope resulting from combination of S1 and S2. */
396 choose_inner_scope (tree s1
, tree s2
)
402 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
407 /* Emit lexical block notes needed to change scope from S1 to S2. */
410 change_scope (rtx orig_insn
, tree s1
, tree s2
)
412 rtx insn
= orig_insn
;
413 tree com
= NULL_TREE
;
414 tree ts1
= s1
, ts2
= s2
;
419 gcc_assert (ts1
&& ts2
);
420 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
421 ts1
= BLOCK_SUPERCONTEXT (ts1
);
422 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
423 ts2
= BLOCK_SUPERCONTEXT (ts2
);
426 ts1
= BLOCK_SUPERCONTEXT (ts1
);
427 ts2
= BLOCK_SUPERCONTEXT (ts2
);
436 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
437 NOTE_BLOCK (note
) = s
;
438 s
= BLOCK_SUPERCONTEXT (s
);
445 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
446 NOTE_BLOCK (insn
) = s
;
447 s
= BLOCK_SUPERCONTEXT (s
);
451 /* Return lexical scope block insn belong to. */
453 insn_scope (const_rtx insn
)
455 int max
= VEC_length (int, block_locators_locs
);
457 int loc
= INSN_LOCATOR (insn
);
459 /* When block_locators_locs was initialized, the pro- and epilogue
460 insns didn't exist yet and can therefore not be found this way.
461 But we know that they belong to the outer most block of the
463 Without this test, the prologue would be put inside the block of
464 the first valid instruction in the function and when that first
465 insn is part of an inlined function then the low_pc of that
466 inlined function is messed up. Likewise for the epilogue and
467 the last valid instruction. */
468 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
469 return DECL_INITIAL (cfun
->decl
);
475 int pos
= (min
+ max
) / 2;
476 int tmp
= VEC_index (int, block_locators_locs
, pos
);
478 if (tmp
<= loc
&& min
!= pos
)
480 else if (tmp
> loc
&& max
!= pos
)
488 return VEC_index (tree
, block_locators_blocks
, min
);
491 /* Return line number of the statement specified by the locator. */
493 locator_location (int loc
)
495 int max
= VEC_length (int, locations_locators_locs
);
500 int pos
= (min
+ max
) / 2;
501 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
503 if (tmp
<= loc
&& min
!= pos
)
505 else if (tmp
> loc
&& max
!= pos
)
513 return *VEC_index (location_t
, locations_locators_vals
, min
);
516 /* Return source line of the statement that produced this insn. */
518 locator_line (int loc
)
520 expanded_location xloc
;
524 xloc
= expand_location (locator_location (loc
));
528 /* Return line number of the statement that produced this insn. */
530 insn_line (const_rtx insn
)
532 return locator_line (INSN_LOCATOR (insn
));
535 /* Return source file of the statement specified by LOC. */
537 locator_file (int loc
)
539 expanded_location xloc
;
543 xloc
= expand_location (locator_location (loc
));
547 /* Return source file of the statement that produced this insn. */
549 insn_file (const_rtx insn
)
551 return locator_file (INSN_LOCATOR (insn
));
554 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
555 on the scope tree and the newly reordered instructions. */
558 reemit_insn_block_notes (void)
560 tree cur_block
= DECL_INITIAL (cfun
->decl
);
564 if (!active_insn_p (insn
))
565 insn
= next_active_insn (insn
);
566 for (; insn
; insn
= next_active_insn (insn
))
570 /* Avoid putting scope notes between jump table and its label. */
572 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
573 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
576 this_block
= insn_scope (insn
);
577 /* For sequences compute scope resulting from merging all scopes
578 of instructions nested inside. */
579 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
582 rtx body
= PATTERN (insn
);
585 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
586 this_block
= choose_inner_scope (this_block
,
587 insn_scope (XVECEXP (body
, 0, i
)));
592 if (this_block
!= cur_block
)
594 change_scope (insn
, cur_block
, this_block
);
595 cur_block
= this_block
;
599 /* change_scope emits before the insn, not after. */
600 note
= emit_note (NOTE_INSN_DELETED
);
601 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
608 /* Link the basic blocks in the correct order, compacting the basic
609 block queue while at it. This also clears the visited flag on
610 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
611 also clears the basic block header and footer fields.
613 This function is usually called after a pass (e.g. tracer) finishes
614 some transformations while in cfglayout mode. The required sequence
615 of the basic blocks is in a linked list along the bb->aux field.
616 This functions re-links the basic block prev_bb and next_bb pointers
617 accordingly, and it compacts and renumbers the blocks. */
620 relink_block_chain (bool stay_in_cfglayout_mode
)
622 basic_block bb
, prev_bb
;
625 /* Maybe dump the re-ordered sequence. */
628 fprintf (dump_file
, "Reordered sequence:\n");
629 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
631 bb
= (basic_block
) bb
->aux
, index
++)
633 fprintf (dump_file
, " %i ", index
);
634 if (get_bb_original (bb
))
635 fprintf (dump_file
, "duplicate of %i ",
636 get_bb_original (bb
)->index
);
637 else if (forwarder_block_p (bb
)
638 && !LABEL_P (BB_HEAD (bb
)))
639 fprintf (dump_file
, "compensation ");
641 fprintf (dump_file
, "bb %i ", bb
->index
);
642 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
646 /* Now reorder the blocks. */
647 prev_bb
= ENTRY_BLOCK_PTR
;
648 bb
= ENTRY_BLOCK_PTR
->next_bb
;
649 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
651 bb
->prev_bb
= prev_bb
;
652 prev_bb
->next_bb
= bb
;
654 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
655 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
657 /* Then, clean up the aux and visited fields. */
661 bb
->il
.rtl
->visited
= 0;
662 if (!stay_in_cfglayout_mode
)
663 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
666 /* Maybe reset the original copy tables, they are not valid anymore
667 when we renumber the basic blocks in compact_blocks. If we are
668 are going out of cfglayout mode, don't re-allocate the tables. */
669 free_original_copy_tables ();
670 if (stay_in_cfglayout_mode
)
671 initialize_original_copy_tables ();
673 /* Finally, put basic_block_info in the new order. */
678 /* Given a reorder chain, rearrange the code to match. */
681 fixup_reorder_chain (void)
686 if (cfg_layout_function_header
)
688 set_first_insn (cfg_layout_function_header
);
689 insn
= cfg_layout_function_header
;
690 while (NEXT_INSN (insn
))
691 insn
= NEXT_INSN (insn
);
694 /* First do the bulk reordering -- rechain the blocks without regard to
695 the needed changes to jumps and labels. */
697 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
699 if (bb
->il
.rtl
->header
)
702 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
704 set_first_insn (bb
->il
.rtl
->header
);
705 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
706 insn
= bb
->il
.rtl
->header
;
707 while (NEXT_INSN (insn
))
708 insn
= NEXT_INSN (insn
);
711 NEXT_INSN (insn
) = BB_HEAD (bb
);
713 set_first_insn (BB_HEAD (bb
));
714 PREV_INSN (BB_HEAD (bb
)) = insn
;
716 if (bb
->il
.rtl
->footer
)
718 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
719 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
720 while (NEXT_INSN (insn
))
721 insn
= NEXT_INSN (insn
);
725 NEXT_INSN (insn
) = cfg_layout_function_footer
;
726 if (cfg_layout_function_footer
)
727 PREV_INSN (cfg_layout_function_footer
) = insn
;
729 while (NEXT_INSN (insn
))
730 insn
= NEXT_INSN (insn
);
732 set_last_insn (insn
);
733 #ifdef ENABLE_CHECKING
734 verify_insn_chain ();
737 /* Now add jumps and labels as needed to match the blocks new
740 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
742 edge e_fall
, e_taken
, e
;
747 if (EDGE_COUNT (bb
->succs
) == 0)
750 /* Find the old fallthru edge, and another non-EH edge for
752 e_taken
= e_fall
= NULL
;
754 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
755 if (e
->flags
& EDGE_FALLTHRU
)
757 else if (! (e
->flags
& EDGE_EH
))
760 bb_end_insn
= BB_END (bb
);
761 if (JUMP_P (bb_end_insn
))
763 if (any_condjump_p (bb_end_insn
))
765 /* If the old fallthru is still next, nothing to do. */
766 if (bb
->aux
== e_fall
->dest
767 || e_fall
->dest
== EXIT_BLOCK_PTR
)
770 /* The degenerated case of conditional jump jumping to the next
771 instruction can happen for jumps with side effects. We need
772 to construct a forwarder block and this will be done just
773 fine by force_nonfallthru below. */
777 /* There is another special case: if *neither* block is next,
778 such as happens at the very end of a function, then we'll
779 need to add a new unconditional jump. Choose the taken
780 edge based on known or assumed probability. */
781 else if (bb
->aux
!= e_taken
->dest
)
783 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
786 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
787 && invert_jump (bb_end_insn
,
788 (e_fall
->dest
== EXIT_BLOCK_PTR
790 : label_for_bb (e_fall
->dest
)), 0))
792 e_fall
->flags
&= ~EDGE_FALLTHRU
;
793 #ifdef ENABLE_CHECKING
794 gcc_assert (could_fall_through
795 (e_taken
->src
, e_taken
->dest
));
797 e_taken
->flags
|= EDGE_FALLTHRU
;
798 update_br_prob_note (bb
);
799 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
803 /* If the "jumping" edge is a crossing edge, and the fall
804 through edge is non-crossing, leave things as they are. */
805 else if ((e_taken
->flags
& EDGE_CROSSING
)
806 && !(e_fall
->flags
& EDGE_CROSSING
))
809 /* Otherwise we can try to invert the jump. This will
810 basically never fail, however, keep up the pretense. */
811 else if (invert_jump (bb_end_insn
,
812 (e_fall
->dest
== EXIT_BLOCK_PTR
814 : label_for_bb (e_fall
->dest
)), 0))
816 e_fall
->flags
&= ~EDGE_FALLTHRU
;
817 #ifdef ENABLE_CHECKING
818 gcc_assert (could_fall_through
819 (e_taken
->src
, e_taken
->dest
));
821 e_taken
->flags
|= EDGE_FALLTHRU
;
822 update_br_prob_note (bb
);
828 /* Otherwise we have some return, switch or computed
829 jump. In the 99% case, there should not have been a
831 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
837 /* No fallthru implies a noreturn function with EH edges, or
838 something similarly bizarre. In any case, we don't need to
843 /* If the fallthru block is still next, nothing to do. */
844 if (bb
->aux
== e_fall
->dest
)
847 /* A fallthru to exit block. */
848 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
852 /* We got here if we need to add a new jump insn. */
853 nb
= force_nonfallthru (e_fall
);
856 nb
->il
.rtl
->visited
= 1;
859 /* Don't process this new block. */
862 /* Make sure new bb is tagged for correct section (same as
863 fall-thru source, since you cannot fall-throu across
864 section boundaries). */
865 BB_COPY_PARTITION (e_fall
->src
, single_pred (bb
));
866 if (flag_reorder_blocks_and_partition
867 && targetm
.have_named_sections
868 && JUMP_P (BB_END (bb
))
869 && !any_condjump_p (BB_END (bb
))
870 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
871 REG_NOTES (BB_END (bb
)) = gen_rtx_EXPR_LIST
872 (REG_CROSSING_JUMP
, NULL_RTX
, REG_NOTES (BB_END (bb
)));
876 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
878 /* Annoying special case - jump around dead jumptables left in the code. */
884 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
885 if (e
->flags
& EDGE_FALLTHRU
)
888 if (e
&& !can_fallthru (e
->src
, e
->dest
))
889 force_nonfallthru (e
);
893 /* Perform sanity checks on the insn chain.
894 1. Check that next/prev pointers are consistent in both the forward and
896 2. Count insns in chain, going both directions, and check if equal.
897 3. Check that get_last_insn () returns the actual end of chain. */
900 verify_insn_chain (void)
903 int insn_cnt1
, insn_cnt2
;
905 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
907 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
908 gcc_assert (PREV_INSN (x
) == prevx
);
910 gcc_assert (prevx
== get_last_insn ());
912 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
914 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
915 gcc_assert (NEXT_INSN (x
) == nextx
);
917 gcc_assert (insn_cnt1
== insn_cnt2
);
920 /* If we have assembler epilogues, the block falling through to exit must
921 be the last one in the reordered chain when we reach final. Ensure
922 that this condition is met. */
924 fixup_fallthru_exit_predecessor (void)
928 basic_block bb
= NULL
;
930 /* This transformation is not valid before reload, because we might
931 separate a call from the instruction that copies the return
933 gcc_assert (reload_completed
);
935 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
936 if (e
->flags
& EDGE_FALLTHRU
)
941 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
943 /* If the very first block is the one with the fall-through exit
944 edge, we have to split that block. */
947 bb
= split_block (bb
, NULL
)->dest
;
950 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
951 c
->il
.rtl
->footer
= NULL
;
955 c
= (basic_block
) c
->aux
;
959 c
= (basic_block
) c
->aux
;
966 /* In case there are more than one fallthru predecessors of exit, force that
967 there is only one. */
970 force_one_exit_fallthru (void)
972 edge e
, predecessor
= NULL
;
975 basic_block forwarder
, bb
;
977 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
978 if (e
->flags
& EDGE_FALLTHRU
)
980 if (predecessor
== NULL
)
992 /* Exit has several fallthru predecessors. Create a forwarder block for
994 forwarder
= split_edge (predecessor
);
995 for (ei
= ei_start (EXIT_BLOCK_PTR
->preds
); (e
= ei_safe_edge (ei
)); )
997 if (e
->src
== forwarder
998 || !(e
->flags
& EDGE_FALLTHRU
))
1001 redirect_edge_and_branch_force (e
, forwarder
);
1004 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
1008 if (bb
->aux
== NULL
&& bb
!= forwarder
)
1010 bb
->aux
= forwarder
;
1016 /* Return true in case it is possible to duplicate the basic block BB. */
1018 /* We do not want to declare the function in a header file, since it should
1019 only be used through the cfghooks interface, and we do not want to move
1020 it to cfgrtl.c since it would require also moving quite a lot of related
1022 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block
);
1025 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
1027 /* Do not attempt to duplicate tablejumps, as we need to unshare
1028 the dispatch table. This is difficult to do, as the instructions
1029 computing jump destination may be hoisted outside the basic block. */
1030 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
1033 /* Do not duplicate blocks containing insns that can't be copied. */
1034 if (targetm
.cannot_copy_insn_p
)
1036 rtx insn
= BB_HEAD (bb
);
1039 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
1041 if (insn
== BB_END (bb
))
1043 insn
= NEXT_INSN (insn
);
1051 duplicate_insn_chain (rtx from
, rtx to
)
1055 /* Avoid updating of boundaries of previous basic block. The
1056 note will get removed from insn stream in fixup. */
1057 last
= emit_note (NOTE_INSN_DELETED
);
1059 /* Create copy at the end of INSN chain. The chain will
1060 be reordered later. */
1061 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1063 switch (GET_CODE (insn
))
1068 /* Avoid copying of dispatch tables. We never duplicate
1069 tablejumps, so this can hit only in case the table got
1070 moved far from original jump. */
1071 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1072 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1074 emit_copy_of_insn_after (insn
, get_last_insn ());
1085 switch (NOTE_KIND (insn
))
1087 /* In case prologue is empty and function contain label
1088 in first BB, we may want to copy the block. */
1089 case NOTE_INSN_PROLOGUE_END
:
1091 case NOTE_INSN_DELETED
:
1092 case NOTE_INSN_DELETED_LABEL
:
1093 /* No problem to strip these. */
1094 case NOTE_INSN_EPILOGUE_BEG
:
1095 /* Debug code expect these notes to exist just once.
1096 Keep them in the master copy.
1097 ??? It probably makes more sense to duplicate them for each
1099 case NOTE_INSN_FUNCTION_BEG
:
1100 /* There is always just single entry to function. */
1101 case NOTE_INSN_BASIC_BLOCK
:
1104 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1105 emit_note_copy (insn
);
1109 /* All other notes should have already been eliminated.
1118 insn
= NEXT_INSN (last
);
1122 /* Create a duplicate of the basic block BB. */
1124 /* We do not want to declare the function in a header file, since it should
1125 only be used through the cfghooks interface, and we do not want to move
1126 it to cfgrtl.c since it would require also moving quite a lot of related
1128 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1131 cfg_layout_duplicate_bb (basic_block bb
)
1136 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1137 new_bb
= create_basic_block (insn
,
1138 insn
? get_last_insn () : NULL
,
1139 EXIT_BLOCK_PTR
->prev_bb
);
1141 BB_COPY_PARTITION (new_bb
, bb
);
1142 if (bb
->il
.rtl
->header
)
1144 insn
= bb
->il
.rtl
->header
;
1145 while (NEXT_INSN (insn
))
1146 insn
= NEXT_INSN (insn
);
1147 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1149 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1152 if (bb
->il
.rtl
->footer
)
1154 insn
= bb
->il
.rtl
->footer
;
1155 while (NEXT_INSN (insn
))
1156 insn
= NEXT_INSN (insn
);
1157 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1159 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1166 /* Main entry point to this module - initialize the datastructures for
1167 CFG layout changes. It keeps LOOPS up-to-date if not null.
1169 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1172 cfg_layout_initialize (unsigned int flags
)
1177 initialize_original_copy_tables ();
1179 cfg_layout_rtl_register_cfg_hooks ();
1181 record_effective_endpoints ();
1183 /* Make sure that the targets of non local gotos are marked. */
1184 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1186 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
1187 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
1190 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1193 /* Splits superblocks. */
1195 break_superblocks (void)
1197 sbitmap superblocks
;
1201 superblocks
= sbitmap_alloc (last_basic_block
);
1202 sbitmap_zero (superblocks
);
1205 if (bb
->flags
& BB_SUPERBLOCK
)
1207 bb
->flags
&= ~BB_SUPERBLOCK
;
1208 SET_BIT (superblocks
, bb
->index
);
1214 rebuild_jump_labels (get_insns ());
1215 find_many_sub_basic_blocks (superblocks
);
1221 /* Finalize the changes: reorder insn list according to the sequence specified
1222 by aux pointers, enter compensation code, rebuild scope forest. */
1225 cfg_layout_finalize (void)
1227 #ifdef ENABLE_CHECKING
1228 verify_flow_info ();
1230 force_one_exit_fallthru ();
1231 rtl_register_cfg_hooks ();
1232 if (reload_completed
1233 #ifdef HAVE_epilogue
1237 fixup_fallthru_exit_predecessor ();
1238 fixup_reorder_chain ();
1240 rebuild_jump_labels (get_insns ());
1241 delete_dead_jumptables ();
1243 #ifdef ENABLE_CHECKING
1244 verify_insn_chain ();
1245 verify_flow_info ();
1249 /* Checks whether all N blocks in BBS array can be copied. */
1251 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1257 for (i
= 0; i
< n
; i
++)
1258 bbs
[i
]->flags
|= BB_DUPLICATED
;
1260 for (i
= 0; i
< n
; i
++)
1262 /* In case we should redirect abnormal edge during duplication, fail. */
1264 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1265 if ((e
->flags
& EDGE_ABNORMAL
)
1266 && (e
->dest
->flags
& BB_DUPLICATED
))
1272 if (!can_duplicate_block_p (bbs
[i
]))
1280 for (i
= 0; i
< n
; i
++)
1281 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1286 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1287 are placed into array NEW_BBS in the same order. Edges from basic blocks
1288 in BBS are also duplicated and copies of those of them
1289 that lead into BBS are redirected to appropriate newly created block. The
1290 function assigns bbs into loops (copy of basic block bb is assigned to
1291 bb->loop_father->copy loop, so this must be set up correctly in advance)
1292 and updates dominators locally (LOOPS structure that contains the information
1293 about dominators is passed to enable this).
1295 BASE is the superloop to that basic block belongs; if its header or latch
1296 is copied, we do not set the new blocks as header or latch.
1298 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1299 also in the same order.
1301 Newly created basic blocks are put after the basic block AFTER in the
1302 instruction stream, and the order of the blocks in BBS array is preserved. */
1305 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1306 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1307 struct loop
*base
, basic_block after
)
1310 basic_block bb
, new_bb
, dom_bb
;
1313 /* Duplicate bbs, update dominators, assign bbs to loops. */
1314 for (i
= 0; i
< n
; i
++)
1318 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1320 bb
->flags
|= BB_DUPLICATED
;
1321 /* Possibly set loop header. */
1322 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1323 new_bb
->loop_father
->header
= new_bb
;
1325 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1326 new_bb
->loop_father
->latch
= new_bb
;
1329 /* Set dominators. */
1330 for (i
= 0; i
< n
; i
++)
1333 new_bb
= new_bbs
[i
];
1335 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1336 if (dom_bb
->flags
& BB_DUPLICATED
)
1338 dom_bb
= get_bb_copy (dom_bb
);
1339 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1343 /* Redirect edges. */
1344 for (j
= 0; j
< num_edges
; j
++)
1345 new_edges
[j
] = NULL
;
1346 for (i
= 0; i
< n
; i
++)
1349 new_bb
= new_bbs
[i
];
1352 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1354 for (j
= 0; j
< num_edges
; j
++)
1355 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1358 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1360 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1364 /* Clear information about duplicates. */
1365 for (i
= 0; i
< n
; i
++)
1366 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1369 #include "gt-cfglayout.h"