1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
33 #include "cfglayout.h"
37 #include "alloc-pool.h"
39 #include "tree-pass.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer
;
45 rtx cfg_layout_function_header
;
47 static rtx
skip_insns_after_block (basic_block
);
48 static void record_effective_endpoints (void);
49 static rtx
label_for_bb (basic_block
);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx
, tree
, tree
);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree
insn_scope (const_rtx
);
59 unlink_insn_chain (rtx first
, rtx last
)
61 rtx prevfirst
= PREV_INSN (first
);
62 rtx nextlast
= NEXT_INSN (last
);
64 PREV_INSN (first
) = NULL
;
65 NEXT_INSN (last
) = NULL
;
67 NEXT_INSN (prevfirst
) = nextlast
;
69 PREV_INSN (nextlast
) = prevfirst
;
71 set_last_insn (prevfirst
);
73 set_first_insn (nextlast
);
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
82 skip_insns_after_block (basic_block bb
)
84 rtx insn
, last_insn
, next_head
, prev
;
87 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
88 next_head
= BB_HEAD (bb
->next_bb
);
90 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
92 if (insn
== next_head
)
95 switch (GET_CODE (insn
))
102 switch (NOTE_KIND (insn
))
104 case NOTE_INSN_BLOCK_END
:
115 && JUMP_TABLE_DATA_P (NEXT_INSN (insn
)))
117 insn
= NEXT_INSN (insn
);
130 /* It is possible to hit contradictory sequence. For instance:
136 Where barrier belongs to jump_insn, but the note does not. This can be
137 created by removing the basic block originally following
138 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
140 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
142 prev
= PREV_INSN (insn
);
144 switch (NOTE_KIND (insn
))
146 case NOTE_INSN_BLOCK_END
:
149 case NOTE_INSN_DELETED
:
150 case NOTE_INSN_DELETED_LABEL
:
153 reorder_insns (insn
, insn
, last_insn
);
160 /* Locate or create a label for a given basic block. */
163 label_for_bb (basic_block bb
)
165 rtx label
= BB_HEAD (bb
);
167 if (!LABEL_P (label
))
170 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
172 label
= block_label (bb
);
178 /* Locate the effective beginning and end of the insn chain for each
179 block, as defined by skip_insns_after_block above. */
182 record_effective_endpoints (void)
188 for (insn
= get_insns ();
191 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
192 insn
= NEXT_INSN (insn
))
194 /* No basic blocks at all? */
197 if (PREV_INSN (insn
))
198 cfg_layout_function_header
=
199 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
201 cfg_layout_function_header
= NULL_RTX
;
203 next_insn
= get_insns ();
208 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
209 bb
->il
.rtl
->header
= unlink_insn_chain (next_insn
,
210 PREV_INSN (BB_HEAD (bb
)));
211 end
= skip_insns_after_block (bb
);
212 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
213 bb
->il
.rtl
->footer
= unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
214 next_insn
= NEXT_INSN (BB_END (bb
));
217 cfg_layout_function_footer
= next_insn
;
218 if (cfg_layout_function_footer
)
219 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
222 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
223 numbers and files. In order to be GGC friendly we need to use separate
224 varrays. This also slightly improve the memory locality in binary search.
225 The _locs array contains locators where the given property change. The
226 block_locators_blocks contains the scope block that is used for all insn
227 locator greater than corresponding block_locators_locs value and smaller
228 than the following one. Similarly for the other properties. */
229 static VEC(int,heap
) *block_locators_locs
;
230 static GTY(()) VEC(tree
,gc
) *block_locators_blocks
;
231 static VEC(int,heap
) *locations_locators_locs
;
232 DEF_VEC_O(location_t
);
233 DEF_VEC_ALLOC_O(location_t
,heap
);
234 static VEC(location_t
,heap
) *locations_locators_vals
;
235 int prologue_locator
;
236 int epilogue_locator
;
238 /* Hold current location information and last location information, so the
239 datastructures are built lazily only when some instructions in given
241 location_t curr_location
, last_location
;
242 static tree curr_block
, last_block
;
243 static int curr_rtl_loc
= -1;
245 /* Allocate insn locator datastructure. */
247 insn_locators_alloc (void)
249 prologue_locator
= epilogue_locator
= 0;
251 block_locators_locs
= VEC_alloc (int, heap
, 32);
252 block_locators_blocks
= VEC_alloc (tree
, gc
, 32);
253 locations_locators_locs
= VEC_alloc (int, heap
, 32);
254 locations_locators_vals
= VEC_alloc (location_t
, heap
, 32);
263 /* At the end of emit stage, clear current location. */
265 insn_locators_finalize (void)
267 if (curr_rtl_loc
>= 0)
268 epilogue_locator
= curr_insn_locator ();
272 /* Allocate insn locator datastructure. */
274 insn_locators_free (void)
276 prologue_locator
= epilogue_locator
= 0;
278 VEC_free (int, heap
, block_locators_locs
);
279 VEC_free (tree
,gc
, block_locators_blocks
);
280 VEC_free (int, heap
, locations_locators_locs
);
281 VEC_free (location_t
, heap
, locations_locators_vals
);
285 /* Set current location. */
287 set_curr_insn_source_location (location_t location
)
289 /* IV opts calls into RTL expansion to compute costs of operations. At this
290 time locators are not initialized. */
291 if (curr_rtl_loc
== -1)
293 if (location
== last_location
)
295 curr_location
= location
;
298 /* Set current scope block. */
300 set_curr_insn_block (tree b
)
302 /* IV opts calls into RTL expansion to compute costs of operations. At this
303 time locators are not initialized. */
304 if (curr_rtl_loc
== -1)
310 /* Return current insn locator. */
312 curr_insn_locator (void)
314 if (curr_rtl_loc
== -1)
316 if (last_block
!= curr_block
)
319 VEC_safe_push (int, heap
, block_locators_locs
, curr_rtl_loc
);
320 VEC_safe_push (tree
, gc
, block_locators_blocks
, curr_block
);
321 last_block
= curr_block
;
323 if (last_location
!= curr_location
)
326 VEC_safe_push (int, heap
, locations_locators_locs
, curr_rtl_loc
);
327 VEC_safe_push (location_t
, heap
, locations_locators_vals
, &curr_location
);
328 last_location
= curr_location
;
/* Pass callback: switch the function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
341 outof_cfg_layout_mode (void)
346 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
347 bb
->aux
= bb
->next_bb
;
349 cfg_layout_finalize ();
354 struct rtl_opt_pass pass_into_cfg_layout_mode
=
358 "into_cfglayout", /* name */
360 into_cfg_layout_mode
, /* execute */
363 0, /* static_pass_number */
365 0, /* properties_required */
366 PROP_cfglayout
, /* properties_provided */
367 0, /* properties_destroyed */
368 0, /* todo_flags_start */
369 TODO_dump_func
, /* todo_flags_finish */
373 struct rtl_opt_pass pass_outof_cfg_layout_mode
=
377 "outof_cfglayout", /* name */
379 outof_cfg_layout_mode
, /* execute */
382 0, /* static_pass_number */
384 0, /* properties_required */
385 0, /* properties_provided */
386 PROP_cfglayout
, /* properties_destroyed */
387 0, /* todo_flags_start */
388 TODO_dump_func
, /* todo_flags_finish */
392 /* Return scope resulting from combination of S1 and S2. */
394 choose_inner_scope (tree s1
, tree s2
)
400 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
405 /* Emit lexical block notes needed to change scope from S1 to S2. */
408 change_scope (rtx orig_insn
, tree s1
, tree s2
)
410 rtx insn
= orig_insn
;
411 tree com
= NULL_TREE
;
412 tree ts1
= s1
, ts2
= s2
;
417 gcc_assert (ts1
&& ts2
);
418 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
419 ts1
= BLOCK_SUPERCONTEXT (ts1
);
420 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
421 ts2
= BLOCK_SUPERCONTEXT (ts2
);
424 ts1
= BLOCK_SUPERCONTEXT (ts1
);
425 ts2
= BLOCK_SUPERCONTEXT (ts2
);
434 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
435 NOTE_BLOCK (note
) = s
;
436 s
= BLOCK_SUPERCONTEXT (s
);
443 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
444 NOTE_BLOCK (insn
) = s
;
445 s
= BLOCK_SUPERCONTEXT (s
);
449 /* Return lexical scope block locator belongs to. */
451 locator_scope (int loc
)
453 int max
= VEC_length (int, block_locators_locs
);
456 /* When block_locators_locs was initialized, the pro- and epilogue
457 insns didn't exist yet and can therefore not be found this way.
458 But we know that they belong to the outer most block of the
460 Without this test, the prologue would be put inside the block of
461 the first valid instruction in the function and when that first
462 insn is part of an inlined function then the low_pc of that
463 inlined function is messed up. Likewise for the epilogue and
464 the last valid instruction. */
465 if (loc
== prologue_locator
|| loc
== epilogue_locator
)
466 return DECL_INITIAL (cfun
->decl
);
472 int pos
= (min
+ max
) / 2;
473 int tmp
= VEC_index (int, block_locators_locs
, pos
);
475 if (tmp
<= loc
&& min
!= pos
)
477 else if (tmp
> loc
&& max
!= pos
)
485 return VEC_index (tree
, block_locators_blocks
, min
);
488 /* Return lexical scope block insn belongs to. */
490 insn_scope (const_rtx insn
)
492 return locator_scope (INSN_LOCATOR (insn
));
495 /* Return line number of the statement specified by the locator. */
497 locator_location (int loc
)
499 int max
= VEC_length (int, locations_locators_locs
);
504 int pos
= (min
+ max
) / 2;
505 int tmp
= VEC_index (int, locations_locators_locs
, pos
);
507 if (tmp
<= loc
&& min
!= pos
)
509 else if (tmp
> loc
&& max
!= pos
)
517 return *VEC_index (location_t
, locations_locators_vals
, min
);
520 /* Return source line of the statement that produced this insn. */
522 locator_line (int loc
)
524 expanded_location xloc
;
528 xloc
= expand_location (locator_location (loc
));
532 /* Return line number of the statement that produced this insn. */
534 insn_line (const_rtx insn
)
536 return locator_line (INSN_LOCATOR (insn
));
539 /* Return source file of the statement specified by LOC. */
541 locator_file (int loc
)
543 expanded_location xloc
;
547 xloc
= expand_location (locator_location (loc
));
551 /* Return source file of the statement that produced this insn. */
553 insn_file (const_rtx insn
)
555 return locator_file (INSN_LOCATOR (insn
));
558 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
560 locator_eq (int loc1
, int loc2
)
564 if (locator_location (loc1
) != locator_location (loc2
))
566 return locator_scope (loc1
) == locator_scope (loc2
);
569 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
570 on the scope tree and the newly reordered instructions. */
573 reemit_insn_block_notes (void)
575 tree cur_block
= DECL_INITIAL (cfun
->decl
);
579 if (!active_insn_p (insn
))
580 insn
= next_active_insn (insn
);
581 for (; insn
; insn
= next_active_insn (insn
))
585 /* Avoid putting scope notes between jump table and its label. */
586 if (JUMP_TABLE_DATA_P (insn
))
589 this_block
= insn_scope (insn
);
590 /* For sequences compute scope resulting from merging all scopes
591 of instructions nested inside. */
592 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
595 rtx body
= PATTERN (insn
);
598 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
599 this_block
= choose_inner_scope (this_block
,
600 insn_scope (XVECEXP (body
, 0, i
)));
605 if (this_block
!= cur_block
)
607 change_scope (insn
, cur_block
, this_block
);
608 cur_block
= this_block
;
612 /* change_scope emits before the insn, not after. */
613 note
= emit_note (NOTE_INSN_DELETED
);
614 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
621 /* Link the basic blocks in the correct order, compacting the basic
622 block queue while at it. This also clears the visited flag on
623 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
624 also clears the basic block header and footer fields.
626 This function is usually called after a pass (e.g. tracer) finishes
627 some transformations while in cfglayout mode. The required sequence
628 of the basic blocks is in a linked list along the bb->aux field.
629 This functions re-links the basic block prev_bb and next_bb pointers
630 accordingly, and it compacts and renumbers the blocks. */
633 relink_block_chain (bool stay_in_cfglayout_mode
)
635 basic_block bb
, prev_bb
;
638 /* Maybe dump the re-ordered sequence. */
641 fprintf (dump_file
, "Reordered sequence:\n");
642 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
644 bb
= (basic_block
) bb
->aux
, index
++)
646 fprintf (dump_file
, " %i ", index
);
647 if (get_bb_original (bb
))
648 fprintf (dump_file
, "duplicate of %i ",
649 get_bb_original (bb
)->index
);
650 else if (forwarder_block_p (bb
)
651 && !LABEL_P (BB_HEAD (bb
)))
652 fprintf (dump_file
, "compensation ");
654 fprintf (dump_file
, "bb %i ", bb
->index
);
655 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
659 /* Now reorder the blocks. */
660 prev_bb
= ENTRY_BLOCK_PTR
;
661 bb
= ENTRY_BLOCK_PTR
->next_bb
;
662 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
664 bb
->prev_bb
= prev_bb
;
665 prev_bb
->next_bb
= bb
;
667 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
668 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
670 /* Then, clean up the aux and visited fields. */
674 bb
->il
.rtl
->visited
= 0;
675 if (!stay_in_cfglayout_mode
)
676 bb
->il
.rtl
->header
= bb
->il
.rtl
->footer
= NULL
;
679 /* Maybe reset the original copy tables, they are not valid anymore
680 when we renumber the basic blocks in compact_blocks. If we are
681 are going out of cfglayout mode, don't re-allocate the tables. */
682 free_original_copy_tables ();
683 if (stay_in_cfglayout_mode
)
684 initialize_original_copy_tables ();
686 /* Finally, put basic_block_info in the new order. */
691 /* Given a reorder chain, rearrange the code to match. */
694 fixup_reorder_chain (void)
699 if (cfg_layout_function_header
)
701 set_first_insn (cfg_layout_function_header
);
702 insn
= cfg_layout_function_header
;
703 while (NEXT_INSN (insn
))
704 insn
= NEXT_INSN (insn
);
707 /* First do the bulk reordering -- rechain the blocks without regard to
708 the needed changes to jumps and labels. */
710 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
712 if (bb
->il
.rtl
->header
)
715 NEXT_INSN (insn
) = bb
->il
.rtl
->header
;
717 set_first_insn (bb
->il
.rtl
->header
);
718 PREV_INSN (bb
->il
.rtl
->header
) = insn
;
719 insn
= bb
->il
.rtl
->header
;
720 while (NEXT_INSN (insn
))
721 insn
= NEXT_INSN (insn
);
724 NEXT_INSN (insn
) = BB_HEAD (bb
);
726 set_first_insn (BB_HEAD (bb
));
727 PREV_INSN (BB_HEAD (bb
)) = insn
;
729 if (bb
->il
.rtl
->footer
)
731 NEXT_INSN (insn
) = bb
->il
.rtl
->footer
;
732 PREV_INSN (bb
->il
.rtl
->footer
) = insn
;
733 while (NEXT_INSN (insn
))
734 insn
= NEXT_INSN (insn
);
738 NEXT_INSN (insn
) = cfg_layout_function_footer
;
739 if (cfg_layout_function_footer
)
740 PREV_INSN (cfg_layout_function_footer
) = insn
;
742 while (NEXT_INSN (insn
))
743 insn
= NEXT_INSN (insn
);
745 set_last_insn (insn
);
746 #ifdef ENABLE_CHECKING
747 verify_insn_chain ();
750 /* Now add jumps and labels as needed to match the blocks new
753 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
755 edge e_fall
, e_taken
, e
;
760 if (EDGE_COUNT (bb
->succs
) == 0)
763 /* Find the old fallthru edge, and another non-EH edge for
765 e_taken
= e_fall
= NULL
;
767 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
768 if (e
->flags
& EDGE_FALLTHRU
)
770 else if (! (e
->flags
& EDGE_EH
))
773 bb_end_insn
= BB_END (bb
);
774 if (JUMP_P (bb_end_insn
))
776 if (any_condjump_p (bb_end_insn
))
778 /* If the old fallthru is still next, nothing to do. */
779 if (bb
->aux
== e_fall
->dest
780 || e_fall
->dest
== EXIT_BLOCK_PTR
)
783 /* The degenerated case of conditional jump jumping to the next
784 instruction can happen for jumps with side effects. We need
785 to construct a forwarder block and this will be done just
786 fine by force_nonfallthru below. */
790 /* There is another special case: if *neither* block is next,
791 such as happens at the very end of a function, then we'll
792 need to add a new unconditional jump. Choose the taken
793 edge based on known or assumed probability. */
794 else if (bb
->aux
!= e_taken
->dest
)
796 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
799 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
800 && invert_jump (bb_end_insn
,
801 (e_fall
->dest
== EXIT_BLOCK_PTR
803 : label_for_bb (e_fall
->dest
)), 0))
805 e_fall
->flags
&= ~EDGE_FALLTHRU
;
806 #ifdef ENABLE_CHECKING
807 gcc_assert (could_fall_through
808 (e_taken
->src
, e_taken
->dest
));
810 e_taken
->flags
|= EDGE_FALLTHRU
;
811 update_br_prob_note (bb
);
812 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
816 /* If the "jumping" edge is a crossing edge, and the fall
817 through edge is non-crossing, leave things as they are. */
818 else if ((e_taken
->flags
& EDGE_CROSSING
)
819 && !(e_fall
->flags
& EDGE_CROSSING
))
822 /* Otherwise we can try to invert the jump. This will
823 basically never fail, however, keep up the pretense. */
824 else if (invert_jump (bb_end_insn
,
825 (e_fall
->dest
== EXIT_BLOCK_PTR
827 : label_for_bb (e_fall
->dest
)), 0))
829 e_fall
->flags
&= ~EDGE_FALLTHRU
;
830 #ifdef ENABLE_CHECKING
831 gcc_assert (could_fall_through
832 (e_taken
->src
, e_taken
->dest
));
834 e_taken
->flags
|= EDGE_FALLTHRU
;
835 update_br_prob_note (bb
);
841 /* Otherwise we have some return, switch or computed
842 jump. In the 99% case, there should not have been a
844 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
850 /* No fallthru implies a noreturn function with EH edges, or
851 something similarly bizarre. In any case, we don't need to
856 /* If the fallthru block is still next, nothing to do. */
857 if (bb
->aux
== e_fall
->dest
)
860 /* A fallthru to exit block. */
861 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
865 /* We got here if we need to add a new jump insn. */
866 nb
= force_nonfallthru (e_fall
);
869 nb
->il
.rtl
->visited
= 1;
872 /* Don't process this new block. */
875 /* Make sure new bb is tagged for correct section (same as
876 fall-thru source, since you cannot fall-throu across
877 section boundaries). */
878 BB_COPY_PARTITION (e_fall
->src
, single_pred (bb
));
879 if (flag_reorder_blocks_and_partition
880 && targetm
.have_named_sections
881 && JUMP_P (BB_END (bb
))
882 && !any_condjump_p (BB_END (bb
))
883 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
884 add_reg_note (BB_END (bb
), REG_CROSSING_JUMP
, NULL_RTX
);
888 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
890 /* Annoying special case - jump around dead jumptables left in the code. */
896 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
897 if (e
->flags
& EDGE_FALLTHRU
)
900 if (e
&& !can_fallthru (e
->src
, e
->dest
))
901 force_nonfallthru (e
);
904 /* Ensure goto_locus from edges has some instructions with that locus
912 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
913 if (e
->goto_locus
&& !(e
->flags
& EDGE_ABNORMAL
))
918 insn
= BB_END (e
->src
);
919 end
= PREV_INSN (BB_HEAD (e
->src
));
921 && (!INSN_P (insn
) || INSN_LOCATOR (insn
) == 0))
922 insn
= PREV_INSN (insn
);
924 && locator_eq (INSN_LOCATOR (insn
), (int) e
->goto_locus
))
926 if (simplejump_p (BB_END (e
->src
))
927 && INSN_LOCATOR (BB_END (e
->src
)) == 0)
929 INSN_LOCATOR (BB_END (e
->src
)) = e
->goto_locus
;
932 if (e
->dest
!= EXIT_BLOCK_PTR
)
934 insn
= BB_HEAD (e
->dest
);
935 end
= NEXT_INSN (BB_END (e
->dest
));
936 while (insn
!= end
&& !INSN_P (insn
))
937 insn
= NEXT_INSN (insn
);
938 if (insn
!= end
&& INSN_LOCATOR (insn
)
939 && locator_eq (INSN_LOCATOR (insn
), (int) e
->goto_locus
))
943 if (!INSN_P (BB_END (nb
)))
944 BB_END (nb
) = emit_insn_after_noloc (gen_nop (), BB_END (nb
),
946 INSN_LOCATOR (BB_END (nb
)) = e
->goto_locus
;
951 /* Perform sanity checks on the insn chain.
952 1. Check that next/prev pointers are consistent in both the forward and
954 2. Count insns in chain, going both directions, and check if equal.
955 3. Check that get_last_insn () returns the actual end of chain. */
958 verify_insn_chain (void)
961 int insn_cnt1
, insn_cnt2
;
963 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
965 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
966 gcc_assert (PREV_INSN (x
) == prevx
);
968 gcc_assert (prevx
== get_last_insn ());
970 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
972 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
973 gcc_assert (NEXT_INSN (x
) == nextx
);
975 gcc_assert (insn_cnt1
== insn_cnt2
);
978 /* If we have assembler epilogues, the block falling through to exit must
979 be the last one in the reordered chain when we reach final. Ensure
980 that this condition is met. */
982 fixup_fallthru_exit_predecessor (void)
986 basic_block bb
= NULL
;
988 /* This transformation is not valid before reload, because we might
989 separate a call from the instruction that copies the return
991 gcc_assert (reload_completed
);
993 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
994 if (e
->flags
& EDGE_FALLTHRU
)
999 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
1001 /* If the very first block is the one with the fall-through exit
1002 edge, we have to split that block. */
1005 bb
= split_block (bb
, NULL
)->dest
;
1008 bb
->il
.rtl
->footer
= c
->il
.rtl
->footer
;
1009 c
->il
.rtl
->footer
= NULL
;
1012 while (c
->aux
!= bb
)
1013 c
= (basic_block
) c
->aux
;
1017 c
= (basic_block
) c
->aux
;
1024 /* In case there are more than one fallthru predecessors of exit, force that
1025 there is only one. */
1028 force_one_exit_fallthru (void)
1030 edge e
, predecessor
= NULL
;
1033 basic_block forwarder
, bb
;
1035 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
1036 if (e
->flags
& EDGE_FALLTHRU
)
1038 if (predecessor
== NULL
)
1050 /* Exit has several fallthru predecessors. Create a forwarder block for
1052 forwarder
= split_edge (predecessor
);
1053 for (ei
= ei_start (EXIT_BLOCK_PTR
->preds
); (e
= ei_safe_edge (ei
)); )
1055 if (e
->src
== forwarder
1056 || !(e
->flags
& EDGE_FALLTHRU
))
1059 redirect_edge_and_branch_force (e
, forwarder
);
1062 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
1066 if (bb
->aux
== NULL
&& bb
!= forwarder
)
1068 bb
->aux
= forwarder
;
1074 /* Return true in case it is possible to duplicate the basic block BB. */
1076 /* We do not want to declare the function in a header file, since it should
1077 only be used through the cfghooks interface, and we do not want to move
1078 it to cfgrtl.c since it would require also moving quite a lot of related
1080 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block
);
1083 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
1085 /* Do not attempt to duplicate tablejumps, as we need to unshare
1086 the dispatch table. This is difficult to do, as the instructions
1087 computing jump destination may be hoisted outside the basic block. */
1088 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
1091 /* Do not duplicate blocks containing insns that can't be copied. */
1092 if (targetm
.cannot_copy_insn_p
)
1094 rtx insn
= BB_HEAD (bb
);
1097 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
1099 if (insn
== BB_END (bb
))
1101 insn
= NEXT_INSN (insn
);
1109 duplicate_insn_chain (rtx from
, rtx to
)
1111 rtx insn
, last
, copy
;
1113 /* Avoid updating of boundaries of previous basic block. The
1114 note will get removed from insn stream in fixup. */
1115 last
= emit_note (NOTE_INSN_DELETED
);
1117 /* Create copy at the end of INSN chain. The chain will
1118 be reordered later. */
1119 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
1121 switch (GET_CODE (insn
))
1126 /* Avoid copying of dispatch tables. We never duplicate
1127 tablejumps, so this can hit only in case the table got
1128 moved far from original jump. */
1129 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1130 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1132 copy
= emit_copy_of_insn_after (insn
, get_last_insn ());
1133 maybe_copy_epilogue_insn (insn
, copy
);
1144 switch (NOTE_KIND (insn
))
1146 /* In case prologue is empty and function contain label
1147 in first BB, we may want to copy the block. */
1148 case NOTE_INSN_PROLOGUE_END
:
1150 case NOTE_INSN_DELETED
:
1151 case NOTE_INSN_DELETED_LABEL
:
1152 /* No problem to strip these. */
1153 case NOTE_INSN_FUNCTION_BEG
:
1154 /* There is always just single entry to function. */
1155 case NOTE_INSN_BASIC_BLOCK
:
1158 case NOTE_INSN_EPILOGUE_BEG
:
1159 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
1160 emit_note_copy (insn
);
1164 /* All other notes should have already been eliminated. */
1172 insn
= NEXT_INSN (last
);
1176 /* Create a duplicate of the basic block BB. */
1178 /* We do not want to declare the function in a header file, since it should
1179 only be used through the cfghooks interface, and we do not want to move
1180 it to cfgrtl.c since it would require also moving quite a lot of related
1182 extern basic_block
cfg_layout_duplicate_bb (basic_block
);
1185 cfg_layout_duplicate_bb (basic_block bb
)
1190 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
1191 new_bb
= create_basic_block (insn
,
1192 insn
? get_last_insn () : NULL
,
1193 EXIT_BLOCK_PTR
->prev_bb
);
1195 BB_COPY_PARTITION (new_bb
, bb
);
1196 if (bb
->il
.rtl
->header
)
1198 insn
= bb
->il
.rtl
->header
;
1199 while (NEXT_INSN (insn
))
1200 insn
= NEXT_INSN (insn
);
1201 insn
= duplicate_insn_chain (bb
->il
.rtl
->header
, insn
);
1203 new_bb
->il
.rtl
->header
= unlink_insn_chain (insn
, get_last_insn ());
1206 if (bb
->il
.rtl
->footer
)
1208 insn
= bb
->il
.rtl
->footer
;
1209 while (NEXT_INSN (insn
))
1210 insn
= NEXT_INSN (insn
);
1211 insn
= duplicate_insn_chain (bb
->il
.rtl
->footer
, insn
);
1213 new_bb
->il
.rtl
->footer
= unlink_insn_chain (insn
, get_last_insn ());
1220 /* Main entry point to this module - initialize the datastructures for
1221 CFG layout changes. It keeps LOOPS up-to-date if not null.
1223 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1226 cfg_layout_initialize (unsigned int flags
)
1231 initialize_original_copy_tables ();
1233 cfg_layout_rtl_register_cfg_hooks ();
1235 record_effective_endpoints ();
1237 /* Make sure that the targets of non local gotos are marked. */
1238 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
1240 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
1241 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
1244 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
1247 /* Splits superblocks. */
1249 break_superblocks (void)
1251 sbitmap superblocks
;
1255 superblocks
= sbitmap_alloc (last_basic_block
);
1256 sbitmap_zero (superblocks
);
1259 if (bb
->flags
& BB_SUPERBLOCK
)
1261 bb
->flags
&= ~BB_SUPERBLOCK
;
1262 SET_BIT (superblocks
, bb
->index
);
1268 rebuild_jump_labels (get_insns ());
1269 find_many_sub_basic_blocks (superblocks
);
1275 /* Finalize the changes: reorder insn list according to the sequence specified
1276 by aux pointers, enter compensation code, rebuild scope forest. */
1279 cfg_layout_finalize (void)
1281 #ifdef ENABLE_CHECKING
1282 verify_flow_info ();
1284 force_one_exit_fallthru ();
1285 rtl_register_cfg_hooks ();
1286 if (reload_completed
1287 #ifdef HAVE_epilogue
1291 fixup_fallthru_exit_predecessor ();
1292 fixup_reorder_chain ();
1294 rebuild_jump_labels (get_insns ());
1295 delete_dead_jumptables ();
1297 #ifdef ENABLE_CHECKING
1298 verify_insn_chain ();
1299 verify_flow_info ();
1303 /* Checks whether all N blocks in BBS array can be copied. */
1305 can_copy_bbs_p (basic_block
*bbs
, unsigned n
)
1311 for (i
= 0; i
< n
; i
++)
1312 bbs
[i
]->flags
|= BB_DUPLICATED
;
1314 for (i
= 0; i
< n
; i
++)
1316 /* In case we should redirect abnormal edge during duplication, fail. */
1318 FOR_EACH_EDGE (e
, ei
, bbs
[i
]->succs
)
1319 if ((e
->flags
& EDGE_ABNORMAL
)
1320 && (e
->dest
->flags
& BB_DUPLICATED
))
1326 if (!can_duplicate_block_p (bbs
[i
]))
1334 for (i
= 0; i
< n
; i
++)
1335 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1340 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1341 are placed into array NEW_BBS in the same order. Edges from basic blocks
1342 in BBS are also duplicated and copies of those of them
1343 that lead into BBS are redirected to appropriate newly created block. The
1344 function assigns bbs into loops (copy of basic block bb is assigned to
1345 bb->loop_father->copy loop, so this must be set up correctly in advance)
1346 and updates dominators locally (LOOPS structure that contains the information
1347 about dominators is passed to enable this).
1349 BASE is the superloop to that basic block belongs; if its header or latch
1350 is copied, we do not set the new blocks as header or latch.
1352 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1353 also in the same order.
1355 Newly created basic blocks are put after the basic block AFTER in the
1356 instruction stream, and the order of the blocks in BBS array is preserved. */
1359 copy_bbs (basic_block
*bbs
, unsigned n
, basic_block
*new_bbs
,
1360 edge
*edges
, unsigned num_edges
, edge
*new_edges
,
1361 struct loop
*base
, basic_block after
)
1364 basic_block bb
, new_bb
, dom_bb
;
1367 /* Duplicate bbs, update dominators, assign bbs to loops. */
1368 for (i
= 0; i
< n
; i
++)
1372 new_bb
= new_bbs
[i
] = duplicate_block (bb
, NULL
, after
);
1374 bb
->flags
|= BB_DUPLICATED
;
1375 /* Possibly set loop header. */
1376 if (bb
->loop_father
->header
== bb
&& bb
->loop_father
!= base
)
1377 new_bb
->loop_father
->header
= new_bb
;
1379 if (bb
->loop_father
->latch
== bb
&& bb
->loop_father
!= base
)
1380 new_bb
->loop_father
->latch
= new_bb
;
1383 /* Set dominators. */
1384 for (i
= 0; i
< n
; i
++)
1387 new_bb
= new_bbs
[i
];
1389 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1390 if (dom_bb
->flags
& BB_DUPLICATED
)
1392 dom_bb
= get_bb_copy (dom_bb
);
1393 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, dom_bb
);
1397 /* Redirect edges. */
1398 for (j
= 0; j
< num_edges
; j
++)
1399 new_edges
[j
] = NULL
;
1400 for (i
= 0; i
< n
; i
++)
1403 new_bb
= new_bbs
[i
];
1406 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
1408 for (j
= 0; j
< num_edges
; j
++)
1409 if (edges
[j
] && edges
[j
]->src
== bb
&& edges
[j
]->dest
== e
->dest
)
1412 if (!(e
->dest
->flags
& BB_DUPLICATED
))
1414 redirect_edge_and_branch_force (e
, get_bb_copy (e
->dest
));
1418 /* Clear information about duplicates. */
1419 for (i
= 0; i
< n
; i
++)
1420 bbs
[i
]->flags
&= ~BB_DUPLICATED
;
1423 #include "gt-cfglayout.h"