/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

alloc_pool cfg_layout_pool;

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static rtx duplicate_insn_chain (rtx, rtx);
static void break_superblocks (void);
static tree insn_scope (rtx);
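
/* Unlink the insn chain FIRST through LAST from the function's insn
   stream, keeping the remaining chain consistent, and return FIRST.  */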
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  if (!insn)
    abort ();  /* No basic blocks at all?  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;
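
  /* For each basic block, record the insns that precede its head (they
     become the block's header) and the skippable insns that follow its
     end (they become its footer), unlinking both from the main chain.  */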
  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer,
                                                    get_last_insn ());
}

/* Data structures representing mapping of INSN_LOCATOR into scope blocks,
   line numbers and files.  In order to be GGC friendly we need to use
   separate varrays.  This also slightly improves the memory locality in
   binary search.  The _locs array contains locators where the given
   property changes.  The block_locators_blocks array contains the scope
   block that is used for all insn locators greater than the corresponding
   block_locators_locs value and smaller than the following one.  Similarly
   for the other properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
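
  /* Walk the insn chain: whenever the current scope block, line number or
     file changes, bump the locator and record the change in the varrays;
     every active insn is then tagged with the current locator.  */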
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if ((active_insn_p (insn)
           && GET_CODE (PATTERN (insn)) != ADDR_VEC
           && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
              last_file_name = file_name;
            }
        }
      if (!prologue_locator && file_name)
        prologue_locator = loc;
      if (!NEXT_INSN (insn))
        epilogue_locator = loc;
      if (active_insn_p (insn))
        INSN_LOCATOR (insn) = loc;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              if (block && TREE_CODE (block) == FUNCTION_DECL)
                block = 0;
              break;
            default:
              if (NOTE_LINE_NUMBER (insn) > 0)
                {
                  line_number = NOTE_LINE_NUMBER (insn);
                  file_name = (char *) NOTE_SOURCE_FILE (insn);
                }
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
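
  /* Binary search in block_locators_locs for the largest recorded locator
     that is still <= LOC; the entry of block_locators_blocks at that index
     is the innermost scope covering INSN.  */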
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e_taken = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || (!bb->rbi->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets having jumps with
                 side effects.

                 Create temporarily the duplicated edge representing the
                 branch.  It will be removed again by
                 force_nonfallthru_and_redirect, which would otherwise get
                 confused by a fallthru edge not pointing to the next basic
                 block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  if (!redirect_jump (BB_END (bb), block_label (bb), 0))
                    abort ();
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fake->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          cfg_layout_initialize_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }

  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb;
           bb = bb->rbi->next, index++)
        {
          fprintf (rtl_dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (rtl_dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb)
                   && GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
            fprintf (rtl_dump_file, "compensation ");
          else
            fprintf (rtl_dump_file, "bb %i ", bb->index);
          fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
        }
    }
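
  /* Rewire the basic_block_info array and the prev_bb/next_bb links so
     that they follow the new chain order.  */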
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
        continue;
      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse directions.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;
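
  /* If the fallthru predecessor is not already the last block in the
     chain, unlink it and move it to the end so that it can fall through
     into the exit block.  */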
  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}

/* Return true if it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to the exit block would require adding
     a jump and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}
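
/* Create a copy of the insn chain FROM up to and including TO, emitted at
   the end of the current insn chain, and return the first insn of the
   copy.  */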
static rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in its first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose to duplicate the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while BB
                 reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note_copy (insn);
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB and redirect edge E into it.
   If E is not specified, BB is just copied, but updating the frequencies
   etc. is left to the caller.  */

basic_block
cfg_layout_duplicate_bb (basic_block bb, edge e)
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
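  /* Copy BB's outgoing edges to the duplicate.  When an edge E is given,
     NEW_COUNT of BB's executions move to the copy, so each duplicated edge
     receives a proportional share of the counts and the original edges
     keep the remainder.  */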
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
         of another block (which therefore are known to be disjoint), there
         is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (e && bb->count)
        {
          /* Take care for overflows!  Compute the scale factor as a
             parts-per-10000 fraction first, so that two full gcov counts
             are never multiplied together.  */
          n->count = s->count * (new_count * 10000 / bb->count) / 10000;
          s->count -= n->count;
        }
      else
        n->count = s->count;
    }

  if (e)
    {
      new_bb->count = new_count;
      bb->count -= new_count;

      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      redirect_edge_and_branch_force (e, new_bb);

      if (bb->count < 0)
        bb->count = 0;
      if (bb->frequency < 0)
        bb->frequency = 0;
    }
  else
    {
      new_bb->count = bb->count;
      new_bb->frequency = bb->frequency;
    }

  new_bb->rbi->original = bb;
  bb->rbi->copy = new_bb;

  return new_bb;
}
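
/* Allocate memory for BB's rbi (reorder_block_def) and zero it.  */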
void
cfg_layout_initialize_rbi (basic_block bb)
{
  if (bb->rbi)
    abort ();
  bb->rbi = pool_alloc (cfg_layout_pool);
  memset (bb->rbi, 0, sizeof (struct reorder_block_def));
}

/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.  */

void
cfg_layout_initialize (void)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  cfg_layout_pool =
    create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
                       n_basic_blocks + 2);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    cfg_layout_initialize_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT);
}

/* Splits superblocks.  */
static void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  int i;

  superblocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (superblocks);

  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->flags & BB_SUPERBLOCK)
      {
        BASIC_BLOCK (i)->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, i);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_alloc_pool (cfg_layout_pool);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  bool ret = true;
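
  /* Temporarily mark every block in BBS as duplicated; an abnormal edge
     whose destination carries the mark would have to be redirected into
     the copy, which we cannot do.  The marks are cleared before
     returning.  */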
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect an abnormal edge during duplication,
         fail.  */
      for (e = bbs[i]->succ; e; e = e->succ_next)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   basic blocks into loops (the copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (the structure that contains the
   information about dominators is available to enable this).

   BASE is the superloop to which these basic blocks belong; if its header
   or latch is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      for (e = new_bb->succ; e; e = e->succ_next)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}

#include "gt-cfglayout.h"