/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_footer;
static rtx skip_insns_after_block	PARAMS ((basic_block));
static void record_effective_endpoints	PARAMS ((void));
static rtx label_for_bb		PARAMS ((basic_block));
static void fixup_reorder_chain	PARAMS ((void));

static void set_block_levels		PARAMS ((tree, int));
static void change_scope		PARAMS ((rtx, tree, tree));
static tree choose_inner_scope		PARAMS ((tree, tree));

void verify_insn_chain			PARAMS ((void));
static void cleanup_unconditional_jumps	PARAMS ((struct loops *));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain		PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain	PARAMS ((rtx, rtx));
static void break_superblocks		PARAMS ((void));
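
/* Unlink the insn chain FIRST through LAST from the global insn stream,
   fixing up the links of the neighboring insns and the first/last insn
   pointers as needed, and return FIRST.  */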
static rtx
unlink_insn_chain (first, last)
     rtx first;
     rtx last;
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */
static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      continue;

	    default:
	      break;
	    }
	  break;

	case CODE_LABEL:
	  if (NEXT_INSN (insn)
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

	jump_insn
	NOTE_INSN_LOOP_BEG
	barrier

     where the barrier belongs to the jump_insn, but the note does not.
     This can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;

	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (bb->head) && next_insn != bb->head)
	RBI (bb)->header = unlink_insn_chain (next_insn,
					      PREV_INSN (bb->head));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
	RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}

/* Record, for each insn, the lexical block it belongs to (in INSN_SCOPE)
   and delete the NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END notes; they are
   rebuilt later by scope_to_insns_finalize.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
	{
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_BLOCK_BEG:
	      block = NOTE_BLOCK (insn);
	      delete_insn (insn);
	      break;
	    case NOTE_INSN_BLOCK_END:
	      block = BLOCK_SUPERCONTEXT (block);
	      if (block && TREE_CODE (block) == FUNCTION_DECL)
		block = 0;
	      delete_insn (insn);
	      break;
	    default:
	      break;
	    }
	}
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from combination of S1 and S2.  */

static tree
choose_inner_scope (s1, s2)
     tree s1, s2;
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
	abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					     INSN_SCOPE (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = RBI (bb)->next, index++)
    {
      if (RBI (bb)->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = RBI (bb)->header;
	  else
	    set_first_insn (RBI (bb)->header);
	  PREV_INSN (RBI (bb)->header) = insn;
	  insn = RBI (bb)->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = bb->head;
      else
	set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (RBI (bb)->footer)
	{
	  NEXT_INSN (insn) = RBI (bb)->footer;
	  PREV_INSN (RBI (bb)->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = function_footer;
  if (function_footer)
    PREV_INSN (function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e; e = e->succ_next)
	if (e->flags & EDGE_FALLTHRU)
	  e_taken = e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (RBI (bb)->next == e_fall->dest
		  || (!RBI (bb)->next
		      && e_fall->dest == EXIT_BLOCK_PTR))
		continue;

	      /* The degenerate case of a conditional jump jumping to the
		 next instruction can happen on targets having jumps with
		 side effects.

		 Temporarily create the duplicated edge representing the
		 branch.  It will get unified by
		 force_nonfallthru_and_redirect, which would otherwise get
		 confused by a fallthru edge not pointing to the next basic
		 block.  */
	      if (!e_taken)
		{
		  rtx note;
		  edge e_fake;

		  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

		  if (!redirect_jump (bb->end, block_label (bb), 0))
		    abort ();
		  note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
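		  /* If the jump carried a branch-probability note, move
		     the probability and expected count over to the fake
		     branch edge.  */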
		  if (note)
		    {
		      int prob = INTVAL (XEXP (note, 0));

		      e_fake->probability = prob;
		      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
		      e_fall->probability -= e_fake->probability;
		      e_fall->count -= e_fake->count;
		      if (e_fall->probability < 0)
			e_fall->probability = 0;
		      if (e_fall->count < 0)
			e_fall->count = 0;
		    }
		}
	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (RBI (bb)->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      label_for_bb (e_fall->dest), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    label_for_bb (e_fall->dest), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (returnjump_p (bb_end_insn))
	    continue;
	  else
	    {
	      /* Otherwise we have some switch or computed jump.  In the
		 99% case, there should not have been a fallthru edge.  */
	      if (! e_fall)
		continue;

#ifdef CASE_DROPS_THROUGH
	      /* Except for VAX.  Since we didn't have predication for the
		 tablejump, the fallthru block should not have moved.  */
	      if (RBI (bb)->next == e_fall->dest)
		continue;
	      bb_end_insn = skip_insns_after_block (bb);
#else
	      abort ();
#endif
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (RBI (bb)->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}
      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
	  RBI (nb)->visited = 1;
	  RBI (nb)->next = RBI (bb)->next;
	  RBI (bb)->next = nb;
	  /* Don't process this new block.  */
	  bb = nb;
	}
    }
  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
	   bb;
	   bb = RBI (bb)->next, index++)
	{
	  fprintf (rtl_dump_file, " %i ", index);
	  if (RBI (bb)->original)
	    fprintf (rtl_dump_file, "duplicate of %i ",
		     RBI (bb)->original->index);
	  else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
	    fprintf (rtl_dump_file, "compensation ");
	  else
	    fprintf (rtl_dump_file, "bb %i ", bb->index);
	  fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
	}
    }
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = RBI (bb)->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* Remove any unconditional jumps and forwarder blocks, creating fallthru
   edges instead.  During BB reordering, fallthru edges are not required
   to target the next basic block in the linear CFG layout, so the
   unconditional jumps are not needed.  If LOOPS is not null, also update
   the loop structure and dominators.  */

static void
cleanup_unconditional_jumps (loops)
     struct loops *loops;
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
	continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
	continue;
      if (!bb->succ->succ_next)
	{
	  rtx insn;

	  if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
	      && bb->prev_bb != ENTRY_BLOCK_PTR)
	    {
	      basic_block prev = bb->prev_bb;

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
			 bb->index);

	      if (loops)
		{
		  /* bb cannot be loop header, as it only has one entry
		     edge.  It could be a loop latch.  */
		  if (bb->loop_father->header == bb)
		    abort ();

		  if (bb->loop_father->latch == bb)
		    bb->loop_father->latch = bb->pred->src;

		  if (get_immediate_dominator
		      (loops->cfg.dom, bb->succ->dest) == bb)
		    set_immediate_dominator
		      (loops->cfg.dom, bb->succ->dest, bb->pred->src);

		  remove_bb_from_loops (bb);
		  delete_from_dominance_info (loops->cfg.dom, bb);
		}

	      redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
	      flow_delete_block (bb);
	      bb = prev;
	    }
	  else if (simplejump_p (bb->end))
	    {
	      rtx jump = bb->end;

	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
			 INSN_UID (jump), bb->index);
	      delete_insn (jump);
	      bb->succ->flags |= EDGE_FALLTHRU;
	    }
	  else
	    continue;
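
	  /* The jump (or the whole forwarder block) is gone; delete any
	     barriers left between this block and the start of the next
	     one.  */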
	  insn = NEXT_INSN (bb->end);
	  while (insn
		 && (GET_CODE (insn) != NOTE
		     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
	    {
	      rtx next = NEXT_INSN (insn);

	      if (GET_CODE (insn) == BARRIER)
		delete_barrier (insn);

	      insn = next;
	    }
	}
    }
}

/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */

static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (RBI (c)->next != bb)
	c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
	c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (bb)
     basic_block bb;
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to exit would require adding a jump
     and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && (s->flags & EDGE_FALLTHRU))
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (bb->end, NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = bb->head;

      while (1)
	{
	  if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
	    return false;
	  if (insn == bb->end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}
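
/* Create a copy of the insn chain FROM through TO inclusive at the end
   of the current insn stream, and return the first insn of the copy
   (the copies get moved into their final place later, during fixup).  */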
static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from the original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	      /* In case the prologue is empty and the function contains a
		 label in the first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_LOOP_VTOP:
	    case NOTE_INSN_LOOP_CONT:
	    case NOTE_INSN_LOOP_BEG:
	    case NOTE_INSN_LOOP_END:
	      /* Strip down the loop notes - we don't really want to keep
		 them consistent in loop copies.  */
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_FUNCTION_END:
	      /* Debug code expects these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	      /* There is no purpose in duplicating the prologue.  */
	    case NOTE_INSN_BLOCK_BEG:
	    case NOTE_INSN_BLOCK_END:
	      /* The BLOCK_BEG/BLOCK_END notes should be eliminated while
		 BB reordering is in progress.  */
	    case NOTE_INSN_EH_REGION_BEG:
	    case NOTE_INSN_EH_REGION_END:
	      /* Should never exist at BB duplication time.  */
	      abort ();
	      break;

	    case NOTE_INSN_REPEATED_LINE_NUMBER:
	      emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	      break;

	    default:
	      if (NOTE_LINE_NUMBER (insn) < 0)
		abort ();
	      /* It is possible that no_line_number is set and the note
		 won't be emitted.  */
	      emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
	    }
	  break;

	default:
	  abort ();
	}
    }

  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Redirect edge E to DEST.  */

bool
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;
  bool ret;

  /* redirect_edge_and_branch may decide to turn the branch into a
     fallthru edge in case the basic blocks appear to be in sequence.
     Avoid this transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (src->end) == JUMP_INSN
	  && JUMP_LABEL (src->end) == e->dest->head)
	{
	  if (!redirect_jump (src->end, block_label (dest), 0))
	    abort ();
	}
      /* In case we are redirecting a fallthru edge to the branch edge
	 of a conditional jump, remove it.  */
      if (src->succ->succ_next
	  && !src->succ->succ_next->succ_next)
	{
	  edge s = e->succ_next ? e->succ_next : src->succ;
	  if (s->dest == dest
	      && any_condjump_p (src->end)
	      && onlyjump_p (src->end))
	    delete_insn (src->end);
	}
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;

  return ret;
}

/* Same as split_block but update cfg_layout structures.  */
edge
cfg_layout_split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  edge fallthru = split_block (bb, insn);

  alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
  RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
  RBI (fallthru->src)->footer = NULL;

  return fallthru;
}

/* Create a duplicate of the basic block BB and redirect edge E into it.  */

basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
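
  /* Duplicate the header and footer insn chains that cfglayout keeps
     off to the side for BB, if any, and attach the copies to the new
     block.  */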
  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
	RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
	RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
	 of another block (which therefore are known to be disjoint), there
	 is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
	{
	  /* Take care for overflows!  */
	  n->count = s->count * (new_count * 10000 / bb->count) / 10000;
	  s->count -= n->count;
	}
      else
	n->count = 0;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  RBI (new_bb)->original = bb;
  RBI (bb)->copy = new_bb;

  return new_bb;
}
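
/* An illustrative sketch of how this module is typically driven (the
   transformation step in the middle stands for whatever the caller
   does; it is not code from this file):

	cfg_layout_initialize (loops);
	... rewrite the RBI (bb)->next chain to the desired block order ...
	cfg_layout_finalize ();  */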

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.  */

void
cfg_layout_initialize (loops)
     struct loops *loops;
{
  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps (loops);

  record_effective_endpoints ();
}

/* Splits superblocks.  */
static void
break_superblocks ()
{
  sbitmap superblocks;
  int i, need;

  superblocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (superblocks);

  need = 0;

  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->flags & BB_SUPERBLOCK)
      {
	BASIC_BLOCK (i)->flags &= ~BB_SUPERBLOCK;
	SET_BIT (superblocks, i);
	need = 1;
      }

  if (need)
    {
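      /* Some superblocks were found; relabel the jumps and rediscover
	 the sub-basic-blocks inside them.  */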
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}