1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
36 /* The contents of the current function definition are allocated
37 in this obstack, and all are freed at the end of the function. */
38 extern struct obstack flow_obstack
;
40 /* Holds the interesting trailing notes for the function. */
41 static rtx function_footer
;
43 static rtx skip_insns_after_block
PARAMS ((basic_block
));
44 static void record_effective_endpoints
PARAMS ((void));
45 static rtx label_for_bb
PARAMS ((basic_block
));
46 static void fixup_reorder_chain
PARAMS ((void));
48 static void set_block_levels
PARAMS ((tree
, int));
49 static void change_scope
PARAMS ((rtx
, tree
, tree
));
51 void verify_insn_chain
PARAMS ((void));
52 static void cleanup_unconditional_jumps
PARAMS ((struct loops
*));
53 static void fixup_fallthru_exit_predecessor
PARAMS ((void));
54 static rtx unlink_insn_chain
PARAMS ((rtx
, rtx
));
55 static rtx duplicate_insn_chain
PARAMS ((rtx
, rtx
));
56 static void break_superblocks
PARAMS ((void));
/* Detach the insn sequence FIRST..LAST from the global doubly-linked insn
   chain and splice the former neighbours (PREV_INSN (FIRST) and
   NEXT_INSN (LAST)) back together.  NOTE(review): this extract is missing
   lines (braces, guard conditions, return statement), so which branches
   call set_first_insn/set_last_insn cannot be verified here -- presumably
   they handle the cases where FIRST or LAST was at a chain boundary.  */
59 unlink_insn_chain (first
, last
)
63 rtx prevfirst
= PREV_INSN (first
);
64 rtx nextlast
= NEXT_INSN (last
);
/* Cut the detached span free on both ends.  */
66 PREV_INSN (first
) = NULL
;
67 NEXT_INSN (last
) = NULL
;
/* Re-link the surrounding chain across the removed span.  */
69 NEXT_INSN (prevfirst
) = nextlast
;
71 PREV_INSN (nextlast
) = prevfirst
;
73 set_last_insn (prevfirst
);
75 set_first_insn (nextlast
);
79 /* Skip over inter-block insns occurring after BB which are typically
80 associated with BB (e.g., barriers). If there are any such insns,
81 we return the last one. Otherwise, we return the end of BB.
   NOTE(review): extraction is fragmentary -- braces, several case labels
   and the loop-exit logic of the original are missing here.  */
84 skip_insns_after_block (bb
)
87 rtx insn
, last_insn
, next_head
, prev
;
/* Remember where the next block starts so the scan below stops before
   swallowing insns that belong to the successor block.  */
90 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
91 next_head
= bb
->next_bb
->head
;
/* Walk forward from BB's end, classifying trailing notes/barriers.  */
93 for (last_insn
= insn
= bb
->end
; (insn
= NEXT_INSN (insn
)) != 0; )
95 if (insn
== next_head
)
98 switch (GET_CODE (insn
))
105 switch (NOTE_LINE_NUMBER (insn
))
107 case NOTE_INSN_LOOP_END
:
108 case NOTE_INSN_BLOCK_END
:
111 case NOTE_INSN_DELETED
:
112 case NOTE_INSN_DELETED_LABEL
:
/* Presumably part of a condition recognizing a dispatch-table label
   followed by its ADDR_VEC/ADDR_DIFF_VEC jump table -- the leading
   operands of this condition are missing from the extract.  */
123 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
124 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
125 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
127 insn
= NEXT_INSN (insn
);
140 /* It is possible to hit contradictory sequence. For instance:
146 Where barrier belongs to jump_insn, but the note does not. This can be
147 created by removing the basic block originally following
148 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
150 for (insn
= last_insn
; insn
!= bb
->end
; insn
= prev
)
152 prev
= PREV_INSN (insn
);
153 if (GET_CODE (insn
) == NOTE
)
154 switch (NOTE_LINE_NUMBER (insn
))
156 case NOTE_INSN_LOOP_END
:
157 case NOTE_INSN_BLOCK_END
:
158 case NOTE_INSN_DELETED
:
159 case NOTE_INSN_DELETED_LABEL
:
/* Move the stray note after the recorded last insn.  */
162 reorder_insns (insn
, insn
, last_insn
);
169 /* Locate or create a label for a given basic block.
   Returns BB's existing head label, or a newly created one (via
   block_label) when the block does not start with a CODE_LABEL.
   NOTE(review): the function signature and return statement are missing
   from this extract.  */
175 rtx label
= bb
->head
;
177 if (GET_CODE (label
) != CODE_LABEL
)
/* Dump-file trace only; rtl_dump_file is presumably guarded by a
   missing null check in the original.  */
180 fprintf (rtl_dump_file
, "Emitting label for block %d\n", bb
->index
);
182 label
= block_label (bb
);
188 /* Locate the effective beginning and end of the insn chain for each
189 block, as defined by skip_insns_after_block above.
   Detaches inter-block header/footer insns into RBI(bb)->header and
   RBI(bb)->footer, and stashes trailing insns after the last block in
   the file-scope function_footer.  NOTE(review): the enclosing FOR_EACH_BB
   style loop and braces are missing from this extract.  */
192 record_effective_endpoints ()
194 rtx next_insn
= get_insns ();
/* Any insns between the previous block's effective end and BB's head
   become BB's detached header chain.  */
201 if (PREV_INSN (bb
->head
) && next_insn
!= bb
->head
)
202 RBI (bb
)->header
= unlink_insn_chain (next_insn
,
203 PREV_INSN (bb
->head
));
204 end
= skip_insns_after_block (bb
);
/* Insns after BB's end but still associated with BB become its footer.  */
205 if (NEXT_INSN (bb
->end
) && bb
->end
!= end
)
206 RBI (bb
)->footer
= unlink_insn_chain (NEXT_INSN (bb
->end
), end
);
207 next_insn
= NEXT_INSN (bb
->end
);
/* Whatever remains after the final block is the function footer.  */
210 function_footer
= next_insn
;
212 function_footer
= unlink_insn_chain (function_footer
, get_last_insn ());
215 /* Build a varray mapping INSN_UID to lexical block. Return it.
   Walks the insn stream, tracking the current lexical BLOCK via
   BLOCK_BEG/BLOCK_END notes and recording it in INSN_SCOPE for each
   active insn.  NOTE(review): local declarations, braces and the
   initialization of `block' are missing from this extract.  */
218 scope_to_insns_initialize ()
223 for (insn
= get_insns (); insn
; insn
= next
)
225 next
= NEXT_INSN (insn
);
/* Dispatch tables carry no meaningful scope; skip them.  */
227 if (active_insn_p (insn
)
228 && GET_CODE (PATTERN (insn
)) != ADDR_VEC
229 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
)
230 INSN_SCOPE (insn
) = block
;
231 else if (GET_CODE (insn
) == NOTE
)
233 switch (NOTE_LINE_NUMBER (insn
))
235 case NOTE_INSN_BLOCK_BEG
:
/* Entering a nested lexical block.  */
236 block
= NOTE_BLOCK (insn
);
239 case NOTE_INSN_BLOCK_END
:
/* Leaving the current block; pop to its parent.  */
240 block
= BLOCK_SUPERCONTEXT (block
);
249 /* Tag the blocks with a depth number so that change_scope can find
250 the common parent easily. */
251 set_block_levels (DECL_INITIAL (cfun
->decl
), 0);
254 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
255 found in the block tree.
   Recurses over subblocks (depth + 1) and iterates over siblings via
   BLOCK_CHAIN.  NOTE(review): the signature line, parameter declarations,
   and the enclosing while-loop of the original are missing here.  */
258 set_block_levels (block
, level
)
264 BLOCK_NUMBER (block
) = level
;
265 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
266 block
= BLOCK_CHAIN (block
);
270 /* Return scope resulting from combination of S1 and S2, i.e. the
   deeper (inner) of the two lexical scopes as measured by BLOCK_NUMBER.
   NOTE(review): the signature, null-handling and return statements of
   the original are missing from this extract.  */
272 choose_inner_scope (s1
, s2
)
279 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
284 /* Emit lexical block notes needed to change scope from S1 to S2.
   Finds the common ancestor COM of S1 and S2 in the block tree by
   walking both up to equal depth and then in lockstep, then emits
   BLOCK_END notes from S1 up to COM and BLOCK_BEG notes from COM down
   to S2, all before ORIG_INSN.  NOTE(review): braces, loop headers and
   the assignment of `com' are missing from this extract.  */
287 change_scope (orig_insn
, s1
, s2
)
291 rtx insn
= orig_insn
;
292 tree com
= NULL_TREE
;
293 tree ts1
= s1
, ts2
= s2
;
298 if (ts1
== NULL
|| ts2
== NULL
)
/* Bring the deeper scope up until both are at the same depth.  */
300 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
301 ts1
= BLOCK_SUPERCONTEXT (ts1
);
302 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
303 ts2
= BLOCK_SUPERCONTEXT (ts2
);
/* Same depth: climb both until they meet at the common ancestor.  */
306 ts1
= BLOCK_SUPERCONTEXT (ts1
);
307 ts2
= BLOCK_SUPERCONTEXT (ts2
);
/* Close scopes from S1 up to the common ancestor.  */
316 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
317 NOTE_BLOCK (note
) = s
;
318 s
= BLOCK_SUPERCONTEXT (s
);
/* Open scopes from the common ancestor down to S2.  */
325 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
326 NOTE_BLOCK (insn
) = s
;
327 s
= BLOCK_SUPERCONTEXT (s
);
331 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
332 on the scope tree and the newly reordered instructions.
   Walks active insns in final order and calls change_scope whenever an
   insn's recorded INSN_SCOPE differs from the current block.
   NOTE(review): declarations of insn/this_block/note and several braces
   are missing from this extract.  */
335 scope_to_insns_finalize ()
337 tree cur_block
= DECL_INITIAL (cfun
->decl
);
341 if (!active_insn_p (insn
))
342 insn
= next_active_insn (insn
);
343 for (; insn
; insn
= next_active_insn (insn
))
347 this_block
= INSN_SCOPE (insn
);
348 /* For sequences compute scope resulting from merging all scopes
349 of instructions nested inside. */
350 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
353 rtx body
= PATTERN (insn
);
356 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
357 this_block
= choose_inner_scope (this_block
,
358 INSN_SCOPE (XVECEXP (body
, 0, i
)));
363 if (this_block
!= cur_block
)
365 change_scope (insn
, cur_block
, this_block
);
366 cur_block
= this_block
;
370 /* change_scope emits before the insn, not after. */
371 note
= emit_note (NULL
, NOTE_INSN_DELETED
);
/* Close any scopes still open at function end, back to the outermost
   block of the function.  */
372 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
378 /* Given a reorder chain, rearrange the code to match.
   Three phases: (1) relink the insn stream to follow the RBI->next
   chain, reattaching detached header/footer insns; (2) insert or invert
   jumps so every fallthru edge targets the physically next block;
   (3) rewrite BASIC_BLOCK ordering and prev_bb/next_bb links.
   NOTE(review): this extract is missing many lines (braces, several
   conditions, #endif's, loop bodies), so the control flow below cannot
   be fully verified here.  */
381 fixup_reorder_chain ()
383 basic_block bb
, prev_bb
;
387 /* First do the bulk reordering -- rechain the blocks without regard to
388 the needed changes to jumps and labels. */
390 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= 0;
392 bb
= RBI (bb
)->next
, index
++)
/* Reattach BB's detached header chain before BB's head.  */
394 if (RBI (bb
)->header
)
397 NEXT_INSN (insn
) = RBI (bb
)->header
;
399 set_first_insn (RBI (bb
)->header
);
400 PREV_INSN (RBI (bb
)->header
) = insn
;
401 insn
= RBI (bb
)->header
;
402 while (NEXT_INSN (insn
))
403 insn
= NEXT_INSN (insn
);
406 NEXT_INSN (insn
) = bb
->head
;
408 set_first_insn (bb
->head
);
409 PREV_INSN (bb
->head
) = insn
;
/* Reattach BB's detached footer chain after BB's end.  */
411 if (RBI (bb
)->footer
)
413 NEXT_INSN (insn
) = RBI (bb
)->footer
;
414 PREV_INSN (RBI (bb
)->footer
) = insn
;
415 while (NEXT_INSN (insn
))
416 insn
= NEXT_INSN (insn
);
/* Sanity: every basic block must have been visited exactly once.  */
420 if (index
!= n_basic_blocks
)
/* Append the saved trailing insns of the function.  */
423 NEXT_INSN (insn
) = function_footer
;
425 PREV_INSN (function_footer
) = insn
;
427 while (NEXT_INSN (insn
))
428 insn
= NEXT_INSN (insn
);
430 set_last_insn (insn
);
431 #ifdef ENABLE_CHECKING
432 verify_insn_chain ();
435 /* Now add jumps and labels as needed to match the blocks new
438 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= RBI (bb
)->next
)
440 edge e_fall
, e_taken
, e
;
444 if (bb
->succ
== NULL
)
447 /* Find the old fallthru edge, and another non-EH edge for
449 e_taken
= e_fall
= NULL
;
450 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
451 if (e
->flags
& EDGE_FALLTHRU
)
453 else if (! (e
->flags
& EDGE_EH
))
456 bb_end_insn
= bb
->end
;
457 if (GET_CODE (bb_end_insn
) == JUMP_INSN
)
459 if (any_condjump_p (bb_end_insn
))
461 /* If the old fallthru is still next, nothing to do. */
462 if (RBI (bb
)->next
== e_fall
->dest
464 && e_fall
->dest
== EXIT_BLOCK_PTR
))
467 /* There is one special case: if *neither* block is next,
468 such as happens at the very end of a function, then we'll
469 need to add a new unconditional jump. Choose the taken
470 edge based on known or assumed probability. */
471 if (RBI (bb
)->next
!= e_taken
->dest
)
473 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
/* Presumably: invert the branch when the taken edge is the less
   probable one -- the leading operand of this condition is missing
   from the extract.  */
476 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
477 && invert_jump (bb_end_insn
,
478 label_for_bb (e_fall
->dest
), 0))
/* The inversion succeeded; swap the fallthru/taken roles.  */
480 e_fall
->flags
&= ~EDGE_FALLTHRU
;
481 e_taken
->flags
|= EDGE_FALLTHRU
;
482 update_br_prob_note (bb
);
483 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
487 /* Otherwise we can try to invert the jump. This will
488 basically never fail, however, keep up the pretense. */
489 else if (invert_jump (bb_end_insn
,
490 label_for_bb (e_fall
->dest
), 0))
492 e_fall
->flags
&= ~EDGE_FALLTHRU
;
493 e_taken
->flags
|= EDGE_FALLTHRU
;
494 update_br_prob_note (bb
);
498 else if (returnjump_p (bb_end_insn
))
502 /* Otherwise we have some switch or computed jump. In the
503 99% case, there should not have been a fallthru edge. */
507 #ifdef CASE_DROPS_THROUGH
508 /* Except for VAX. Since we didn't have predication for the
509 tablejump, the fallthru block should not have moved. */
510 if (RBI (bb
)->next
== e_fall
->dest
)
512 bb_end_insn
= skip_insns_after_block (bb
);
520 /* No fallthru implies a noreturn function with EH edges, or
521 something similarly bizarre. In any case, we don't need to
526 /* If the fallthru block is still next, nothing to do. */
527 if (RBI (bb
)->next
== e_fall
->dest
)
530 /* A fallthru to exit block. */
531 if (!RBI (bb
)->next
&& e_fall
->dest
== EXIT_BLOCK_PTR
)
535 /* We got here if we need to add a new jump insn. */
536 nb
= force_nonfallthru (e_fall
);
/* The compensation block force_nonfallthru created needs its own
   reorder bookkeeping, spliced after BB in the chain.  */
539 alloc_aux_for_block (nb
, sizeof (struct reorder_block_def
));
540 RBI (nb
)->visited
= 1;
541 RBI (nb
)->next
= RBI (bb
)->next
;
543 /* Don't process this new block. */
548 /* Put basic_block_info in the new order. */
/* Optional dump-file trace of the final sequence.  */
552 fprintf (rtl_dump_file
, "Reordered sequence:\n");
553 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= 0; bb
; bb
= RBI (bb
)->next
, index
++)
555 fprintf (rtl_dump_file
, " %i ", index
);
556 if (RBI (bb
)->original
)
557 fprintf (rtl_dump_file
, "duplicate of %i ",
558 RBI (bb
)->original
->index
);
559 else if (forwarder_block_p (bb
) && GET_CODE (bb
->head
) != CODE_LABEL
)
560 fprintf (rtl_dump_file
, "compensation ");
562 fprintf (rtl_dump_file
, "bb %i ", bb
->index
);
563 fprintf (rtl_dump_file
, " [%i]\n", bb
->frequency
);
/* Rewrite the prev_bb/next_bb links and BASIC_BLOCK array to match
   the new chain order.  */
567 prev_bb
= ENTRY_BLOCK_PTR
;
568 bb
= ENTRY_BLOCK_PTR
->next_bb
;
571 for (; bb
; prev_bb
= bb
, bb
= RBI (bb
)->next
, index
++)
574 BASIC_BLOCK (index
) = bb
;
576 bb
->prev_bb
= prev_bb
;
577 prev_bb
->next_bb
= bb
;
579 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
580 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
583 /* Perform sanity checks on the insn chain.
584 1. Check that next/prev pointers are consistent in both the forward and
586 2. Count insns in chain, going both directions, and check if equal.
587 3. Check that get_last_insn () returns the actual end of chain.
   NOTE(review): the function signature, declarations of x/prevx/nextx,
   abort calls and braces are missing from this extract.  */
593 int insn_cnt1
, insn_cnt2
;
/* Forward pass: verify each insn's PREV_INSN points at its predecessor.  */
595 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
597 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
598 if (PREV_INSN (x
) != prevx
)
601 if (prevx
!= get_last_insn ())
/* Backward pass: verify each insn's NEXT_INSN points at its successor.  */
604 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
606 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
607 if (NEXT_INSN (x
) != nextx
)
/* The two traversals must have seen the same number of insns.  */
610 if (insn_cnt1
!= insn_cnt2
)
614 /* Remove any unconditional jumps and forwarder block creating fallthru
615 edges instead. During BB reordering, fallthru edges are not required
616 to target next basic block in the linear CFG layout, so the unconditional
617 jumps are not needed. If LOOPS is not null, also update loop structure &
   dominators.  NOTE(review): this extract is missing the enclosing
   per-block loop, braces, and several statements; the control flow
   below cannot be fully verified here.  */
621 cleanup_unconditional_jumps (loops
)
630 if (bb
->succ
->flags
& EDGE_FALLTHRU
)
632 if (!bb
->succ
->succ_next
)
/* A forwarder block with a single entry can be removed entirely,
   redirecting its predecessor straight to its destination.  */
635 if (GET_CODE (bb
->head
) != CODE_LABEL
&& forwarder_block_p (bb
)
636 && bb
->prev_bb
!= ENTRY_BLOCK_PTR
)
638 basic_block prev
= bb
->prev_bb
;
641 fprintf (rtl_dump_file
, "Removing forwarder BB %i\n",
646 /* bb cannot be loop header, as it only has one entry
647 edge. It could be a loop latch. */
648 if (bb
->loop_father
->header
== bb
)
651 if (bb
->loop_father
->latch
== bb
)
/* Transfer latch status to BB's single predecessor.  */
652 bb
->loop_father
->latch
= bb
->pred
->src
;
/* Keep the dominator tree consistent with the removal.  */
654 if (get_immediate_dominator
655 (loops
->cfg
.dom
, bb
->succ
->dest
) == bb
)
656 set_immediate_dominator
657 (loops
->cfg
.dom
, bb
->succ
->dest
, bb
->pred
->src
);
659 remove_bb_from_loops (bb
);
660 delete_from_dominance_info (loops
->cfg
.dom
, bb
);
663 redirect_edge_succ_nodup (bb
->pred
, bb
->succ
->dest
);
664 flow_delete_block (bb
);
/* A plain unconditional jump at block end can simply be deleted,
   turning the edge into a fallthru.  */
667 else if (simplejump_p (bb
->end
))
672 fprintf (rtl_dump_file
, "Removing jump %i in BB %i\n",
673 INSN_UID (jump
), bb
->index
);
675 bb
->succ
->flags
|= EDGE_FALLTHRU
;
/* Clean up barriers left between this block and the next
   basic-block note.  */
680 insn
= NEXT_INSN (bb
->end
);
682 && (GET_CODE (insn
) != NOTE
683 || NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BASIC_BLOCK
))
685 rtx next
= NEXT_INSN (insn
);
687 if (GET_CODE (insn
) == BARRIER
)
688 delete_barrier (insn
);
696 /* The block falling through to exit must be the last one in the
697 reordered chain. Ensure that this condition is met.
   Finds the fallthru predecessor of the exit block and, if it is not
   already last in the RBI->next chain, moves it to the end.
   NOTE(review): braces and a few statements inside the relink loop are
   missing from this extract.  */
699 fixup_fallthru_exit_predecessor ()
702 basic_block bb
= NULL
;
/* Locate the block that falls through into the exit block.  */
704 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
705 if (e
->flags
& EDGE_FALLTHRU
)
/* If BB is not already the chain's tail, unlink it and append it.  */
708 if (bb
&& RBI (bb
)->next
)
710 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
712 while (RBI (c
)->next
!= bb
)
715 RBI (c
)->next
= RBI (bb
)->next
;
716 while (RBI (c
)->next
)
720 RBI (bb
)->next
= NULL
;
724 /* Return true in case it is possible to duplicate the basic block BB.
   Rejects the entry/exit blocks, blocks that fall through to exit, and
   tablejump blocks.  NOTE(review): the signature line, return statements
   and braces of the original are missing from this extract.  */
727 cfg_layout_can_duplicate_bb_p (bb
)
733 if (bb
== EXIT_BLOCK_PTR
|| bb
== ENTRY_BLOCK_PTR
)
736 /* Duplicating fallthru block to exit would require adding a jump
737 and splitting the real last BB. */
738 for (s
= bb
->succ
; s
; s
= s
->succ_next
)
739 if (s
->dest
== EXIT_BLOCK_PTR
&& s
->flags
& EDGE_FALLTHRU
)
742 /* Do not attempt to duplicate tablejumps, as we need to unshare
743 the dispatch table. This is difficult to do, as the instructions
744 computing jump destination may be hoisted outside the basic block. */
745 if (GET_CODE (bb
->end
) == JUMP_INSN
&& JUMP_LABEL (bb
->end
)
746 && (next
= next_nonnote_insn (JUMP_LABEL (bb
->end
)))
747 && GET_CODE (next
) == JUMP_INSN
748 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
749 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
/* Copy the insn chain FROM..TO inclusive onto the end of the current
   insn stream, returning (presumably) the first copied insn.  Real
   insns are copied; dispatch tables and most notes are skipped or
   handled specially as commented below.  NOTE(review): braces, case
   bodies, default labels and the return statement are missing from
   this extract.  */
755 duplicate_insn_chain (from
, to
)
760 /* Avoid updating of boundaries of previous basic block. The
761 note will get removed from insn stream in fixup. */
762 last
= emit_note (NULL
, NOTE_INSN_DELETED
);
764 /* Create copy at the end of INSN chain. The chain will
765 be reordered later. */
766 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
768 switch (GET_CODE (insn
))
773 /* Avoid copying of dispatch tables. We never duplicate
774 tablejumps, so this can hit only in case the table got
775 moved far from original jump. */
776 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
777 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
779 emit_copy_of_insn_after (insn
, get_last_insn ());
790 switch (NOTE_LINE_NUMBER (insn
))
792 /* In case prologue is empty and function contain label
793 in first BB, we may want to copy the block. */
794 case NOTE_INSN_PROLOGUE_END
:
796 case NOTE_INSN_LOOP_VTOP
:
797 case NOTE_INSN_LOOP_CONT
:
798 case NOTE_INSN_LOOP_BEG
:
799 case NOTE_INSN_LOOP_END
:
800 /* Strip down the loop notes - we don't really want to keep
801 them consistent in loop copies. */
802 case NOTE_INSN_DELETED
:
803 case NOTE_INSN_DELETED_LABEL
:
804 /* No problem to strip these. */
805 case NOTE_INSN_EPILOGUE_BEG
:
806 case NOTE_INSN_FUNCTION_END
:
807 /* Debug code expect these notes to exist just once.
808 Keep them in the master copy.
809 ??? It probably makes more sense to duplicate them for each
811 case NOTE_INSN_FUNCTION_BEG
:
812 /* There is always just single entry to function. */
813 case NOTE_INSN_BASIC_BLOCK
:
816 /* There is no purpose to duplicate prologue. */
817 case NOTE_INSN_BLOCK_BEG
:
818 case NOTE_INSN_BLOCK_END
:
819 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
820 reordering is in the progress. */
821 case NOTE_INSN_EH_REGION_BEG
:
822 case NOTE_INSN_EH_REGION_END
:
823 /* Should never exist at BB duplication time. */
826 case NOTE_INSN_REPEATED_LINE_NUMBER
:
827 emit_note (NOTE_SOURCE_FILE (insn
), NOTE_LINE_NUMBER (insn
));
/* Line-number notes (negative values are the special notes handled
   above).  */
831 if (NOTE_LINE_NUMBER (insn
) < 0)
833 /* It is possible that no_line_number is set and the note
835 emit_note (NOTE_SOURCE_FILE (insn
), NOTE_LINE_NUMBER (insn
));
/* First insn of the copy is the one after the placeholder note.  */
842 insn
= NEXT_INSN (last
);
847 /* Redirect Edge to DEST.
   cfglayout-aware wrapper around redirect_edge_and_branch: handles
   fallthru edges specially and removes simplejumps afterwards, since
   simple jumps must not exist in the insn stream during cfglayout.
   NOTE(review): braces, several conditions and the return statement are
   missing from this extract.  */
849 cfg_layout_redirect_edge (e
, dest
)
853 basic_block src
= e
->src
;
854 basic_block old_next_bb
= src
->next_bb
;
857 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
858 in the case the basic block appears to be in sequence. Avoid this
862 if (e
->flags
& EDGE_FALLTHRU
)
864 /* In case we are redirecting fallthru edge to the branch edge
865 of conditional jump, remove it. */
866 if (src
->succ
->succ_next
867 && !src
->succ
->succ_next
->succ_next
)
/* S is the "other" outgoing edge of the two.  */
869 edge s
= e
->succ_next
? e
->succ_next
: src
->succ
;
871 && any_condjump_p (src
->end
)
872 && onlyjump_p (src
->end
))
873 delete_insn (src
->end
);
875 redirect_edge_succ_nodup (e
, dest
);
880 ret
= redirect_edge_and_branch (e
, dest
);
882 /* We don't want simplejumps in the insn stream during cfglayout. */
883 if (simplejump_p (src
->end
))
885 delete_insn (src
->end
);
886 delete_barrier (NEXT_INSN (src
->end
));
887 src
->succ
->flags
|= EDGE_FALLTHRU
;
/* Restore the physical block order possibly disturbed above.  */
889 src
->next_bb
= old_next_bb
;
894 /* Same as split_block but update cfg_layout structures.
   After splitting, the detached footer chain belongs to the new (second)
   half, so it is moved from the source block's RBI to the destination's.
   NOTE(review): the signature/parameter lines and return statement are
   missing from this extract.  */
896 cfg_layout_split_block (bb
, insn
)
900 edge fallthru
= split_block (bb
, insn
);
902 alloc_aux_for_block (fallthru
->dest
, sizeof (struct reorder_block_def
));
903 RBI (fallthru
->dest
)->footer
= RBI (fallthru
->src
)->footer
;
904 RBI (fallthru
->src
)->footer
= NULL
;
908 /* Create a duplicate of the basic block BB and redirect edge E into it.
   Copies BB's insns (plus detached header/footer chains), creates the
   new block before the exit block, scales profile counts/frequencies
   between original and copy, duplicates outgoing edges and liveness
   sets, then redirects E to the copy.  NOTE(review): braces, several
   declarations, guard conditions and the return statement are missing
   from this extract.  */
911 cfg_layout_duplicate_bb (bb
, e
)
918 gcov_type new_count
= e
? e
->count
: 0;
/* The copy cannot carry more count than the original has.  */
920 if (bb
->count
< new_count
)
921 new_count
= bb
->count
;
924 #ifdef ENABLE_CHECKING
925 if (!cfg_layout_can_duplicate_bb_p (bb
))
929 insn
= duplicate_insn_chain (bb
->head
, bb
->end
);
930 new_bb
= create_basic_block (insn
,
931 insn
? get_last_insn () : NULL
,
932 EXIT_BLOCK_PTR
->prev_bb
);
933 alloc_aux_for_block (new_bb
, sizeof (struct reorder_block_def
));
/* Duplicate the detached header chain, if any.  */
935 if (RBI (bb
)->header
)
937 insn
= RBI (bb
)->header
;
938 while (NEXT_INSN (insn
))
939 insn
= NEXT_INSN (insn
);
940 insn
= duplicate_insn_chain (RBI (bb
)->header
, insn
);
942 RBI (new_bb
)->header
= unlink_insn_chain (insn
, get_last_insn ());
/* Duplicate the detached footer chain, if any.  */
945 if (RBI (bb
)->footer
)
947 insn
= RBI (bb
)->footer
;
948 while (NEXT_INSN (insn
))
949 insn
= NEXT_INSN (insn
);
950 insn
= duplicate_insn_chain (RBI (bb
)->footer
, insn
);
952 RBI (new_bb
)->footer
= unlink_insn_chain (insn
, get_last_insn ());
/* Copy liveness information when available.  */
955 if (bb
->global_live_at_start
)
957 new_bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
958 new_bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
959 COPY_REG_SET (new_bb
->global_live_at_start
, bb
->global_live_at_start
);
960 COPY_REG_SET (new_bb
->global_live_at_end
, bb
->global_live_at_end
);
963 new_bb
->loop_depth
= bb
->loop_depth
;
964 new_bb
->flags
= bb
->flags
;
/* Mirror BB's outgoing edges on the copy, splitting profile counts
   proportionally between original and duplicate.  */
965 for (s
= bb
->succ
; s
; s
= s
->succ_next
)
967 n
= make_edge (new_bb
, s
->dest
, s
->flags
);
968 n
->probability
= s
->probability
;
970 /* Take care for overflows! */
971 n
->count
= s
->count
* (new_count
* 10000 / bb
->count
) / 10000;
974 s
->count
-= n
->count
;
977 new_bb
->count
= new_count
;
978 bb
->count
-= new_count
;
982 new_bb
->frequency
= EDGE_FREQUENCY (e
);
983 bb
->frequency
-= EDGE_FREQUENCY (e
);
985 cfg_layout_redirect_edge (e
, new_bb
);
/* Guard against frequency underflow from the subtraction above.  */
990 if (bb
->frequency
< 0)
/* Record the original/copy relationship for later passes.  */
993 RBI (new_bb
)->original
= bb
;
994 RBI (bb
)->copy
= new_bb
;
998 /* Main entry point to this module - initialize the datastructures for
999 CFG layout changes. It keeps LOOPS up-to-date if not null.
   Allocates per-block reorder_block_def aux data, removes unconditional
   jumps/forwarders, and records effective block endpoints.  */
1002 cfg_layout_initialize (loops
)
1003 struct loops
*loops
;
1005 /* Our algorithm depends on fact that there are now dead jumptables
1007 alloc_aux_for_blocks (sizeof (struct reorder_block_def
));
1009 cleanup_unconditional_jumps (loops
);
1011 record_effective_endpoints ();
1014 /* Splits superblocks.
   Collects blocks flagged BB_SUPERBLOCK into a bitmap, clears the flag,
   and re-discovers the sub-basic-block structure within them.
   NOTE(review): declarations of i/need, the guard around the rebuild,
   and the sbitmap_free call of the original are missing here.  */
1016 break_superblocks ()
1018 sbitmap superblocks
;
1021 superblocks
= sbitmap_alloc (n_basic_blocks
);
1022 sbitmap_zero (superblocks
);
1026 for (i
= 0; i
< n_basic_blocks
; i
++)
1027 if (BASIC_BLOCK(i
)->flags
& BB_SUPERBLOCK
)
1029 BASIC_BLOCK(i
)->flags
&= ~BB_SUPERBLOCK
;
1030 SET_BIT (superblocks
, i
);
1036 rebuild_jump_labels (get_insns ());
1037 find_many_sub_basic_blocks (superblocks
);
1043 /* Finalize the changes: reorder insn list according to the sequence, enter
1044 compensation code, rebuild scope forest.
   Counterpart of cfg_layout_initialize: commits the RBI->next ordering
   to the insn stream, frees the aux data, splits superblocks, and runs
   consistency checks when ENABLE_CHECKING is defined.  */
1047 cfg_layout_finalize ()
1049 fixup_fallthru_exit_predecessor ();
1050 fixup_reorder_chain ();
1052 #ifdef ENABLE_CHECKING
1053 verify_insn_chain ();
1056 free_aux_for_blocks ();
1058 break_superblocks ();
1060 #ifdef ENABLE_CHECKING
1061 verify_flow_info ();