1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
37 /* The contents of the current function definition are allocated
38 in this obstack, and all are freed at the end of the function. */
39 extern struct obstack flow_obstack
;
41 /* Holds the interesting trailing notes for the function. */
42 static rtx function_footer
;
44 static rtx skip_insns_after_block
PARAMS ((basic_block
));
45 static void record_effective_endpoints
PARAMS ((void));
46 static rtx label_for_bb
PARAMS ((basic_block
));
47 static void fixup_reorder_chain
PARAMS ((void));
49 static void set_block_levels
PARAMS ((tree
, int));
50 static void change_scope
PARAMS ((rtx
, tree
, tree
));
52 void verify_insn_chain
PARAMS ((void));
53 static void cleanup_unconditional_jumps
PARAMS ((struct loops
*));
54 static void fixup_fallthru_exit_predecessor
PARAMS ((void));
55 static rtx unlink_insn_chain
PARAMS ((rtx
, rtx
));
56 static rtx duplicate_insn_chain
PARAMS ((rtx
, rtx
));
57 static void break_superblocks
PARAMS ((void));
60 unlink_insn_chain (first
, last
)
64 rtx prevfirst
= PREV_INSN (first
);
65 rtx nextlast
= NEXT_INSN (last
);
67 PREV_INSN (first
) = NULL
;
68 NEXT_INSN (last
) = NULL
;
70 NEXT_INSN (prevfirst
) = nextlast
;
72 PREV_INSN (nextlast
) = prevfirst
;
74 set_last_insn (prevfirst
);
76 set_first_insn (nextlast
);
80 /* Skip over inter-block insns occurring after BB which are typically
81 associated with BB (e.g., barriers). If there are any such insns,
82 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): corrupted extraction of skip_insns_after_block from GCC
   cfglayout.c -- interior lines (return type, braces, several case labels,
   returns) are missing and original source line numbers are fused into the
   text.  Restore from upstream before compiling.  The visible logic scans
   forward from bb->end collecting trailing notes/barriers/jump tables, then
   walks backward reordering notes that ended up after the chosen point.  */
85 skip_insns_after_block (bb
)
88 rtx insn
, last_insn
, next_head
, prev
;
91 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
92 next_head
= bb
->next_bb
->head
;
/* Forward scan: stop at the head of the next block.  */
94 for (last_insn
= insn
= bb
->end
; (insn
= NEXT_INSN (insn
)) != 0; )
96 if (insn
== next_head
)
99 switch (GET_CODE (insn
))
106 switch (NOTE_LINE_NUMBER (insn
))
108 case NOTE_INSN_LOOP_END
:
109 case NOTE_INSN_BLOCK_END
:
112 case NOTE_INSN_DELETED
:
113 case NOTE_INSN_DELETED_LABEL
:
124 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
125 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
126 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
128 insn
= NEXT_INSN (insn
);
141 /* It is possible to hit contradictory sequence. For instance:
147 Where barrier belongs to jump_insn, but the note does not. This can be
148 created by removing the basic block originally following
149 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Backward pass: move misplaced trailing notes after last_insn.  */
151 for (insn
= last_insn
; insn
!= bb
->end
; insn
= prev
)
153 prev
= PREV_INSN (insn
);
154 if (GET_CODE (insn
) == NOTE
)
155 switch (NOTE_LINE_NUMBER (insn
))
157 case NOTE_INSN_LOOP_END
:
158 case NOTE_INSN_BLOCK_END
:
159 case NOTE_INSN_DELETED
:
160 case NOTE_INSN_DELETED_LABEL
:
163 reorder_insns (insn
, insn
, last_insn
);
170 /* Locate or create a label for a given basic block. */
176 rtx label
= bb
->head
;
178 if (GET_CODE (label
) != CODE_LABEL
)
181 fprintf (rtl_dump_file
, "Emitting label for block %d\n", bb
->index
);
183 label
= block_label (bb
);
189 /* Locate the effective beginning and end of the insn chain for each
190 block, as defined by skip_insns_after_block above. */
/* NOTE(review): corrupted extraction -- the FOR_EACH_BB loop header, local
   declarations and braces are missing; original line numbers are fused into
   the text.  Restore from upstream GCC cfglayout.c.  Visible behavior: for
   each block, detach the insns before bb->head into RBI(bb)->header and the
   trailing insns after bb->end into RBI(bb)->footer via unlink_insn_chain;
   whatever follows the last block is detached into function_footer.  */
193 record_effective_endpoints ()
195 rtx next_insn
= get_insns ();
202 if (PREV_INSN (bb
->head
) && next_insn
!= bb
->head
)
203 RBI (bb
)->header
= unlink_insn_chain (next_insn
,
204 PREV_INSN (bb
->head
));
205 end
= skip_insns_after_block (bb
);
206 if (NEXT_INSN (bb
->end
) && bb
->end
!= end
)
207 RBI (bb
)->footer
= unlink_insn_chain (NEXT_INSN (bb
->end
), end
);
208 next_insn
= NEXT_INSN (bb
->end
);
/* Insns remaining after the last block become the function footer.  */
211 function_footer
= next_insn
;
213 function_footer
= unlink_insn_chain (function_footer
, get_last_insn ());
216 /* Build a varray mapping INSN_UID to lexical block. Return it. */
/* NOTE(review): corrupted extraction -- local declarations (insn, next,
   block), braces and several statements are missing; original line numbers
   are fused into the text.  Restore from upstream GCC cfglayout.c.  Visible
   behavior: walk the insn chain tracking the current lexical block via
   BLOCK_BEG/BLOCK_END notes and record it in INSN_SCOPE for each active
   insn (dispatch tables excluded); finally number the block tree by depth
   so change_scope can find common parents.  */
219 scope_to_insns_initialize ()
224 for (insn
= get_insns (); insn
; insn
= next
)
226 next
= NEXT_INSN (insn
);
228 if (active_insn_p (insn
)
229 && GET_CODE (PATTERN (insn
)) != ADDR_VEC
230 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
)
231 INSN_SCOPE (insn
) = block
;
232 else if (GET_CODE (insn
) == NOTE
)
234 switch (NOTE_LINE_NUMBER (insn
))
236 case NOTE_INSN_BLOCK_BEG
:
237 block
= NOTE_BLOCK (insn
);
240 case NOTE_INSN_BLOCK_END
:
241 block
= BLOCK_SUPERCONTEXT (block
);
250 /* Tag the blocks with a depth number so that change_scope can find
251 the common parent easily. */
252 set_block_levels (DECL_INITIAL (cfun
->decl
), 0);
255 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
256 found in the block tree. */
259 set_block_levels (block
, level
)
265 BLOCK_NUMBER (block
) = level
;
266 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
267 block
= BLOCK_CHAIN (block
);
271 /* Return sope resulting from combination of S1 and S2. */
273 choose_inner_scope (s1
, s2
)
280 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
285 /* Emit lexical block notes needed to change scope from S1 to S2. */
/* NOTE(review): corrupted extraction -- parameter declarations, braces,
   loop headers and the early-return path are missing; original line numbers
   are fused into the text.  Restore from upstream GCC cfglayout.c.  Visible
   behavior: climb ts1/ts2 toward their common ancestor `com' by depth
   (BLOCK_NUMBER), then emit BLOCK_END notes closing scopes from S1 up to
   the ancestor and BLOCK_BEG notes opening scopes down to S2, all inserted
   before ORIG_INSN.  */
288 change_scope (orig_insn
, s1
, s2
)
292 rtx insn
= orig_insn
;
293 tree com
= NULL_TREE
;
294 tree ts1
= s1
, ts2
= s2
;
299 if (ts1
== NULL
|| ts2
== NULL
)
/* Equalize depths, then climb both until the common ancestor.  */
301 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
302 ts1
= BLOCK_SUPERCONTEXT (ts1
);
303 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
304 ts2
= BLOCK_SUPERCONTEXT (ts2
);
307 ts1
= BLOCK_SUPERCONTEXT (ts1
);
308 ts2
= BLOCK_SUPERCONTEXT (ts2
);
/* Close scopes from S1 up to the common ancestor.  */
317 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
318 NOTE_BLOCK (note
) = s
;
319 s
= BLOCK_SUPERCONTEXT (s
);
/* Open scopes from the common ancestor down to S2.  */
326 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
327 NOTE_BLOCK (insn
) = s
;
328 s
= BLOCK_SUPERCONTEXT (s
);
332 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
333 on the scope tree and the newly reordered instructions. */
/* NOTE(review): corrupted extraction -- declarations (insn, note, i,
   this_block), braces and some statements are missing; original line
   numbers are fused into the text.  Restore from upstream GCC cfglayout.c.
   Visible behavior: walk active insns in the reordered stream, compute each
   insn's scope (merging scopes of SEQUENCE members via choose_inner_scope),
   and emit scope-change notes whenever the scope differs from the current
   one; finally close back out to the function's outermost block.  */
336 scope_to_insns_finalize ()
338 tree cur_block
= DECL_INITIAL (cfun
->decl
);
342 if (!active_insn_p (insn
))
343 insn
= next_active_insn (insn
);
344 for (; insn
; insn
= next_active_insn (insn
))
348 this_block
= INSN_SCOPE (insn
);
349 /* For sequences compute scope resulting from merging all scopes
350 of instructions nested inside. */
351 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
354 rtx body
= PATTERN (insn
);
357 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
358 this_block
= choose_inner_scope (this_block
,
359 INSN_SCOPE (XVECEXP (body
, 0, i
)));
364 if (this_block
!= cur_block
)
366 change_scope (insn
, cur_block
, this_block
);
367 cur_block
= this_block
;
371 /* change_scope emits before the insn, not after. */
372 note
= emit_note (NULL
, NOTE_INSN_DELETED
);
373 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
379 /* Given a reorder chain, rearrange the code to match. */
/* NOTE(review): corrupted extraction of the largest function in the file --
   loop bodies, braces, several conditions and whole statements are missing;
   original line numbers are fused into the text.  Restore from upstream GCC
   cfglayout.c.  Visible structure: (1) bulk-rechain the insn stream
   following the RBI(bb)->next order, reattaching each block's detached
   header/footer insns and the function footer; (2) walk the new order
   fixing up jumps -- inverting conditional jumps or forcing non-fallthru
   edges with new jump blocks as needed; (3) renumber basic_block_info and
   relink prev_bb/next_bb to match, with optional dump-file logging.  */
382 fixup_reorder_chain ()
384 basic_block bb
, prev_bb
;
388 /* First do the bulk reordering -- rechain the blocks without regard to
389 the needed changes to jumps and labels. */
391 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= 0;
393 bb
= RBI (bb
)->next
, index
++)
395 if (RBI (bb
)->header
)
398 NEXT_INSN (insn
) = RBI (bb
)->header
;
400 set_first_insn (RBI (bb
)->header
);
401 PREV_INSN (RBI (bb
)->header
) = insn
;
402 insn
= RBI (bb
)->header
;
403 while (NEXT_INSN (insn
))
404 insn
= NEXT_INSN (insn
);
407 NEXT_INSN (insn
) = bb
->head
;
409 set_first_insn (bb
->head
);
410 PREV_INSN (bb
->head
) = insn
;
412 if (RBI (bb
)->footer
)
414 NEXT_INSN (insn
) = RBI (bb
)->footer
;
415 PREV_INSN (RBI (bb
)->footer
) = insn
;
416 while (NEXT_INSN (insn
))
417 insn
= NEXT_INSN (insn
);
421 if (index
!= n_basic_blocks
)
424 NEXT_INSN (insn
) = function_footer
;
426 PREV_INSN (function_footer
) = insn
;
428 while (NEXT_INSN (insn
))
429 insn
= NEXT_INSN (insn
);
431 set_last_insn (insn
);
432 #ifdef ENABLE_CHECKING
433 verify_insn_chain ();
436 /* Now add jumps and labels as needed to match the blocks new
439 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= RBI (bb
)->next
)
441 edge e_fall
, e_taken
, e
;
445 if (bb
->succ
== NULL
)
448 /* Find the old fallthru edge, and another non-EH edge for
450 e_taken
= e_fall
= NULL
;
451 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
452 if (e
->flags
& EDGE_FALLTHRU
)
454 else if (! (e
->flags
& EDGE_EH
))
457 bb_end_insn
= bb
->end
;
458 if (GET_CODE (bb_end_insn
) == JUMP_INSN
)
460 if (any_condjump_p (bb_end_insn
))
462 /* If the old fallthru is still next, nothing to do. */
463 if (RBI (bb
)->next
== e_fall
->dest
465 && e_fall
->dest
== EXIT_BLOCK_PTR
))
468 /* There is one special case: if *neither* block is next,
469 such as happens at the very end of a function, then we'll
470 need to add a new unconditional jump. Choose the taken
471 edge based on known or assumed probability. */
472 if (RBI (bb
)->next
!= e_taken
->dest
)
474 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
477 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
478 && invert_jump (bb_end_insn
,
479 label_for_bb (e_fall
->dest
), 0))
481 e_fall
->flags
&= ~EDGE_FALLTHRU
;
482 e_taken
->flags
|= EDGE_FALLTHRU
;
483 update_br_prob_note (bb
);
484 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
488 /* Otherwise we can try to invert the jump. This will
489 basically never fail, however, keep up the pretense. */
490 else if (invert_jump (bb_end_insn
,
491 label_for_bb (e_fall
->dest
), 0))
493 e_fall
->flags
&= ~EDGE_FALLTHRU
;
494 e_taken
->flags
|= EDGE_FALLTHRU
;
495 update_br_prob_note (bb
);
499 else if (returnjump_p (bb_end_insn
))
503 /* Otherwise we have some switch or computed jump. In the
504 99% case, there should not have been a fallthru edge. */
508 #ifdef CASE_DROPS_THROUGH
509 /* Except for VAX. Since we didn't have predication for the
510 tablejump, the fallthru block should not have moved. */
511 if (RBI (bb
)->next
== e_fall
->dest
)
513 bb_end_insn
= skip_insns_after_block (bb
);
521 /* No fallthru implies a noreturn function with EH edges, or
522 something similarly bizarre. In any case, we don't need to
527 /* If the fallthru block is still next, nothing to do. */
528 if (RBI (bb
)->next
== e_fall
->dest
)
531 /* A fallthru to exit block. */
532 if (!RBI (bb
)->next
&& e_fall
->dest
== EXIT_BLOCK_PTR
)
536 /* We got here if we need to add a new jump insn. */
537 nb
= force_nonfallthru (e_fall
);
540 alloc_aux_for_block (nb
, sizeof (struct reorder_block_def
));
541 RBI (nb
)->visited
= 1;
542 RBI (nb
)->next
= RBI (bb
)->next
;
544 /* Don't process this new block. */
549 /* Put basic_block_info in the new order. */
553 fprintf (rtl_dump_file
, "Reordered sequence:\n");
554 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= 0; bb
; bb
= RBI (bb
)->next
, index
++)
556 fprintf (rtl_dump_file
, " %i ", index
);
557 if (RBI (bb
)->original
)
558 fprintf (rtl_dump_file
, "duplicate of %i ",
559 RBI (bb
)->original
->index
);
560 else if (forwarder_block_p (bb
) && GET_CODE (bb
->head
) != CODE_LABEL
)
561 fprintf (rtl_dump_file
, "compensation ");
563 fprintf (rtl_dump_file
, "bb %i ", bb
->index
);
564 fprintf (rtl_dump_file
, " [%i]\n", bb
->frequency
);
/* Relink prev_bb/next_bb and basic_block_info to the new order.  */
568 prev_bb
= ENTRY_BLOCK_PTR
;
569 bb
= ENTRY_BLOCK_PTR
->next_bb
;
572 for (; bb
; prev_bb
= bb
, bb
= RBI (bb
)->next
, index
++)
575 BASIC_BLOCK (index
) = bb
;
577 bb
->prev_bb
= prev_bb
;
578 prev_bb
->next_bb
= bb
;
580 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
581 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
584 /* Perform sanity checks on the insn chain.
585 1. Check that next/prev pointers are consistent in both the forward and
587 2. Count insns in chain, going both directions, and check if equal.
588 3. Check that get_last_insn () returns the actual end of chain. */
594 int insn_cnt1
, insn_cnt2
;
596 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
598 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
599 if (PREV_INSN (x
) != prevx
)
602 if (prevx
!= get_last_insn ())
605 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
607 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
608 if (NEXT_INSN (x
) != nextx
)
611 if (insn_cnt1
!= insn_cnt2
)
615 /* Remove any unconditional jumps and forwarder block creating fallthru
616 edges instead. During BB reordering, fallthru edges are not required
617 to target next basic block in the linear CFG layout, so the unconditional
618 jumps are not needed. If LOOPS is not null, also update loop structure &
/* NOTE(review): corrupted extraction -- the block-iteration loop, parameter
   declaration, braces, several conditions and the jump-deletion statements
   are missing; original line numbers are fused into the text.  Restore from
   upstream GCC cfglayout.c.  Visible behavior: for single-successor
   fallthru blocks, delete label-less forwarder blocks (updating loop
   latches and dominators when LOOPS is given) and turn simplejumps into
   plain fallthru edges, then sweep trailing barriers.  */
622 cleanup_unconditional_jumps (loops
)
631 if (bb
->succ
->flags
& EDGE_FALLTHRU
)
633 if (!bb
->succ
->succ_next
)
636 if (GET_CODE (bb
->head
) != CODE_LABEL
&& forwarder_block_p (bb
)
637 && bb
->prev_bb
!= ENTRY_BLOCK_PTR
)
639 basic_block prev
= bb
->prev_bb
;
642 fprintf (rtl_dump_file
, "Removing forwarder BB %i\n",
647 /* bb cannot be loop header, as it only has one entry
648 edge. It could be a loop latch. */
649 if (bb
->loop_father
->header
== bb
)
652 if (bb
->loop_father
->latch
== bb
)
653 bb
->loop_father
->latch
= bb
->pred
->src
;
655 if (get_immediate_dominator
656 (loops
->cfg
.dom
, bb
->succ
->dest
) == bb
)
657 set_immediate_dominator
658 (loops
->cfg
.dom
, bb
->succ
->dest
, bb
->pred
->src
);
660 remove_bb_from_loops (bb
);
661 delete_from_dominance_info (loops
->cfg
.dom
, bb
);
664 redirect_edge_succ_nodup (bb
->pred
, bb
->succ
->dest
);
665 flow_delete_block (bb
);
668 else if (simplejump_p (bb
->end
))
673 fprintf (rtl_dump_file
, "Removing jump %i in BB %i\n",
674 INSN_UID (jump
), bb
->index
);
676 bb
->succ
->flags
|= EDGE_FALLTHRU
;
/* Sweep barriers between this block's end and the next block note.  */
681 insn
= NEXT_INSN (bb
->end
);
683 && (GET_CODE (insn
) != NOTE
684 || NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BASIC_BLOCK
))
686 rtx next
= NEXT_INSN (insn
);
688 if (GET_CODE (insn
) == BARRIER
)
689 delete_barrier (insn
);
697 /* The block falling through to exit must be the last one in the
698 reordered chain. Ensure that this condition is met. */
700 fixup_fallthru_exit_predecessor ()
703 basic_block bb
= NULL
;
705 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
706 if (e
->flags
& EDGE_FALLTHRU
)
709 if (bb
&& RBI (bb
)->next
)
711 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
713 while (RBI (c
)->next
!= bb
)
716 RBI (c
)->next
= RBI (bb
)->next
;
717 while (RBI (c
)->next
)
721 RBI (bb
)->next
= NULL
;
725 /* Return true in case it is possible to duplicate the basic block BB. */
/* NOTE(review): corrupted extraction -- return type, declarations, braces
   and the return statements are missing; original line numbers are fused
   into the text.  Restore from upstream GCC cfglayout.c.  Visible checks:
   reject ENTRY/EXIT, reject a fallthru edge to EXIT, reject tablejumps
   (dispatch table would need unsharing), and reject blocks containing
   insns the target declares uncopyable.  */
728 cfg_layout_can_duplicate_bb_p (bb
)
734 if (bb
== EXIT_BLOCK_PTR
|| bb
== ENTRY_BLOCK_PTR
)
737 /* Duplicating fallthru block to exit would require adding a jump
738 and splitting the real last BB. */
739 for (s
= bb
->succ
; s
; s
= s
->succ_next
)
740 if (s
->dest
== EXIT_BLOCK_PTR
&& s
->flags
& EDGE_FALLTHRU
)
743 /* Do not attempt to duplicate tablejumps, as we need to unshare
744 the dispatch table. This is difficult to do, as the instructions
745 computing jump destination may be hoisted outside the basic block. */
746 if (GET_CODE (bb
->end
) == JUMP_INSN
&& JUMP_LABEL (bb
->end
)
747 && (next
= next_nonnote_insn (JUMP_LABEL (bb
->end
)))
748 && GET_CODE (next
) == JUMP_INSN
749 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
750 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
753 /* Do not duplicate blocks containing insns that can't be copied. */
754 if (targetm
.cannot_copy_insn_p
)
759 if (INSN_P (insn
) && (*targetm
.cannot_copy_insn_p
) (insn
))
763 insn
= NEXT_INSN (insn
);
/* NOTE(review): corrupted extraction of duplicate_insn_chain -- return
   type, parameter declarations, braces, case labels' break statements and
   the final return are missing; original line numbers are fused into the
   text.  Restore from upstream GCC cfglayout.c.  Visible behavior: copy
   insns FROM..TO to the end of the current insn chain (anchored after a
   throwaway NOTE_INSN_DELETED), skipping dispatch tables and filtering
   which note kinds are worth duplicating.  */
771 duplicate_insn_chain (from
, to
)
776 /* Avoid updating of boundaries of previous basic block. The
777 note will get removed from insn stream in fixup. */
778 last
= emit_note (NULL
, NOTE_INSN_DELETED
);
780 /* Create copy at the end of INSN chain. The chain will
781 be reordered later. */
782 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
784 switch (GET_CODE (insn
))
789 /* Avoid copying of dispatch tables. We never duplicate
790 tablejumps, so this can hit only in case the table got
791 moved far from original jump. */
792 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
793 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
795 emit_copy_of_insn_after (insn
, get_last_insn ());
806 switch (NOTE_LINE_NUMBER (insn
))
808 /* In case prologue is empty and function contain label
809 in first BB, we may want to copy the block. */
810 case NOTE_INSN_PROLOGUE_END
:
812 case NOTE_INSN_LOOP_VTOP
:
813 case NOTE_INSN_LOOP_CONT
:
814 case NOTE_INSN_LOOP_BEG
:
815 case NOTE_INSN_LOOP_END
:
816 /* Strip down the loop notes - we don't really want to keep
817 them consistent in loop copies. */
818 case NOTE_INSN_DELETED
:
819 case NOTE_INSN_DELETED_LABEL
:
820 /* No problem to strip these. */
821 case NOTE_INSN_EPILOGUE_BEG
:
822 case NOTE_INSN_FUNCTION_END
:
823 /* Debug code expect these notes to exist just once.
824 Keep them in the master copy.
825 ??? It probably makes more sense to duplicate them for each
827 case NOTE_INSN_FUNCTION_BEG
:
828 /* There is always just single entry to function. */
829 case NOTE_INSN_BASIC_BLOCK
:
832 /* There is no purpose to duplicate prologue. */
833 case NOTE_INSN_BLOCK_BEG
:
834 case NOTE_INSN_BLOCK_END
:
835 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
836 reordering is in the progress. */
837 case NOTE_INSN_EH_REGION_BEG
:
838 case NOTE_INSN_EH_REGION_END
:
839 /* Should never exist at BB duplication time. */
842 case NOTE_INSN_REPEATED_LINE_NUMBER
:
843 emit_note (NOTE_SOURCE_FILE (insn
), NOTE_LINE_NUMBER (insn
));
847 if (NOTE_LINE_NUMBER (insn
) < 0)
849 /* It is possible that no_line_number is set and the note
851 emit_note (NOTE_SOURCE_FILE (insn
), NOTE_LINE_NUMBER (insn
));
858 insn
= NEXT_INSN (last
);
863 /* Redirect Edge to DEST. */
/* NOTE(review): corrupted extraction -- return type, parameter
   declarations, braces, some conditions and the return statement are
   missing; original line numbers are fused into the text.  Restore from
   upstream GCC cfglayout.c.  Visible behavior: special-case redirection of
   a fallthru edge (possibly deleting a now-redundant conditional jump),
   otherwise defer to redirect_edge_and_branch; afterwards strip any
   simplejump left in the stream (cfglayout keeps fallthru edges implicit)
   and restore src->next_bb.  */
865 cfg_layout_redirect_edge (e
, dest
)
869 basic_block src
= e
->src
;
870 basic_block old_next_bb
= src
->next_bb
;
873 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
874 in the case the basic block appears to be in sequence. Avoid this
878 if (e
->flags
& EDGE_FALLTHRU
)
880 /* In case we are redirecting fallthru edge to the branch edge
881 of conditional jump, remove it. */
882 if (src
->succ
->succ_next
883 && !src
->succ
->succ_next
->succ_next
)
885 edge s
= e
->succ_next
? e
->succ_next
: src
->succ
;
887 && any_condjump_p (src
->end
)
888 && onlyjump_p (src
->end
))
889 delete_insn (src
->end
);
891 redirect_edge_succ_nodup (e
, dest
);
896 ret
= redirect_edge_and_branch (e
, dest
);
898 /* We don't want simplejumps in the insn stream during cfglayout. */
899 if (simplejump_p (src
->end
))
901 delete_insn (src
->end
);
902 delete_barrier (NEXT_INSN (src
->end
));
903 src
->succ
->flags
|= EDGE_FALLTHRU
;
905 src
->next_bb
= old_next_bb
;
910 /* Same as split_block but update cfg_layout structures. */
912 cfg_layout_split_block (bb
, insn
)
916 edge fallthru
= split_block (bb
, insn
);
918 alloc_aux_for_block (fallthru
->dest
, sizeof (struct reorder_block_def
));
919 RBI (fallthru
->dest
)->footer
= RBI (fallthru
->src
)->footer
;
920 RBI (fallthru
->src
)->footer
= NULL
;
924 /* Create a duplicate of the basic block BB and redirect edge E into it. */
/* NOTE(review): corrupted extraction -- return type, declarations (insn,
   new_bb, s, n), braces, abort paths and the return statement are missing;
   original line numbers are fused into the text.  Restore from upstream GCC
   cfglayout.c.  Visible behavior: clone BB's insns (plus its detached
   header/footer chains) into a fresh block before EXIT, copy the live
   register sets, replicate outgoing edges while proportionally splitting
   profile counts/frequencies, then redirect E into the copy and link the
   original/copy via RBI.  */
927 cfg_layout_duplicate_bb (bb
, e
)
934 gcov_type new_count
= e
? e
->count
: 0;
936 if (bb
->count
< new_count
)
937 new_count
= bb
->count
;
940 #ifdef ENABLE_CHECKING
941 if (!cfg_layout_can_duplicate_bb_p (bb
))
945 insn
= duplicate_insn_chain (bb
->head
, bb
->end
);
946 new_bb
= create_basic_block (insn
,
947 insn
? get_last_insn () : NULL
,
948 EXIT_BLOCK_PTR
->prev_bb
);
949 alloc_aux_for_block (new_bb
, sizeof (struct reorder_block_def
));
/* Clone the detached header/footer insn chains as well.  */
951 if (RBI (bb
)->header
)
953 insn
= RBI (bb
)->header
;
954 while (NEXT_INSN (insn
))
955 insn
= NEXT_INSN (insn
);
956 insn
= duplicate_insn_chain (RBI (bb
)->header
, insn
);
958 RBI (new_bb
)->header
= unlink_insn_chain (insn
, get_last_insn ());
961 if (RBI (bb
)->footer
)
963 insn
= RBI (bb
)->footer
;
964 while (NEXT_INSN (insn
))
965 insn
= NEXT_INSN (insn
);
966 insn
= duplicate_insn_chain (RBI (bb
)->footer
, insn
);
968 RBI (new_bb
)->footer
= unlink_insn_chain (insn
, get_last_insn ());
971 if (bb
->global_live_at_start
)
973 new_bb
->global_live_at_start
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
974 new_bb
->global_live_at_end
= OBSTACK_ALLOC_REG_SET (&flow_obstack
);
975 COPY_REG_SET (new_bb
->global_live_at_start
, bb
->global_live_at_start
);
976 COPY_REG_SET (new_bb
->global_live_at_end
, bb
->global_live_at_end
);
979 new_bb
->loop_depth
= bb
->loop_depth
;
980 new_bb
->flags
= bb
->flags
;
/* Replicate outgoing edges, splitting profile counts proportionally.  */
981 for (s
= bb
->succ
; s
; s
= s
->succ_next
)
983 n
= make_edge (new_bb
, s
->dest
, s
->flags
);
984 n
->probability
= s
->probability
;
986 /* Take care for overflows! */
987 n
->count
= s
->count
* (new_count
* 10000 / bb
->count
) / 10000;
990 s
->count
-= n
->count
;
993 new_bb
->count
= new_count
;
994 bb
->count
-= new_count
;
998 new_bb
->frequency
= EDGE_FREQUENCY (e
);
999 bb
->frequency
-= EDGE_FREQUENCY (e
);
1001 cfg_layout_redirect_edge (e
, new_bb
);
1006 if (bb
->frequency
< 0)
1009 RBI (new_bb
)->original
= bb
;
1010 RBI (bb
)->copy
= new_bb
;
1014 /* Main entry point to this module - initialize the datastructures for
1015 CFG layout changes. It keeps LOOPS up-to-date if not null. */
1018 cfg_layout_initialize (loops
)
1019 struct loops
*loops
;
1021 /* Our algorithm depends on fact that there are now dead jumptables
1023 alloc_aux_for_blocks (sizeof (struct reorder_block_def
));
1025 cleanup_unconditional_jumps (loops
);
1027 record_effective_endpoints ();
1030 /* Splits superblocks. */
1032 break_superblocks ()
1034 sbitmap superblocks
;
1037 superblocks
= sbitmap_alloc (n_basic_blocks
);
1038 sbitmap_zero (superblocks
);
1042 for (i
= 0; i
< n_basic_blocks
; i
++)
1043 if (BASIC_BLOCK(i
)->flags
& BB_SUPERBLOCK
)
1045 BASIC_BLOCK(i
)->flags
&= ~BB_SUPERBLOCK
;
1046 SET_BIT (superblocks
, i
);
1052 rebuild_jump_labels (get_insns ());
1053 find_many_sub_basic_blocks (superblocks
);
1059 /* Finalize the changes: reorder insn list according to the sequence, enter
1060 compensation code, rebuild scope forest. */
1063 cfg_layout_finalize ()
1065 fixup_fallthru_exit_predecessor ();
1066 fixup_reorder_chain ();
1068 #ifdef ENABLE_CHECKING
1069 verify_insn_chain ();
1072 free_aux_for_blocks ();
1074 break_superblocks ();
1076 #ifdef ENABLE_CHECKING
1077 verify_flow_info ();