/* Source: gcc/cfglayout.c (official-gcc.git); gitweb page-header residue
   removed during extraction.  */
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
30 #include "output.h"
31 #include "function.h"
32 #include "obstack.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
36 /* The contents of the current function definition are allocated
37 in this obstack, and all are freed at the end of the function. */
38 extern struct obstack flow_obstack;
40 /* Holds the interesting trailing notes for the function. */
41 static rtx function_footer;
43 static rtx skip_insns_after_block PARAMS ((basic_block));
44 static void record_effective_endpoints PARAMS ((void));
45 static rtx label_for_bb PARAMS ((basic_block));
46 static void fixup_reorder_chain PARAMS ((void));
48 static void set_block_levels PARAMS ((tree, int));
49 static void change_scope PARAMS ((rtx, tree, tree));
51 void verify_insn_chain PARAMS ((void));
52 static void cleanup_unconditional_jumps PARAMS ((struct loops *));
53 static void fixup_fallthru_exit_predecessor PARAMS ((void));
54 static rtx unlink_insn_chain PARAMS ((rtx, rtx));
55 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
56 static void break_superblocks PARAMS ((void));
58 static rtx
59 unlink_insn_chain (first, last)
60 rtx first;
61 rtx last;
63 rtx prevfirst = PREV_INSN (first);
64 rtx nextlast = NEXT_INSN (last);
66 PREV_INSN (first) = NULL;
67 NEXT_INSN (last) = NULL;
68 if (prevfirst)
69 NEXT_INSN (prevfirst) = nextlast;
70 if (nextlast)
71 PREV_INSN (nextlast) = prevfirst;
72 else
73 set_last_insn (prevfirst);
74 if (!prevfirst)
75 set_first_insn (nextlast);
76 return first;
79 /* Skip over inter-block insns occurring after BB which are typically
80 associated with BB (e.g., barriers). If there are any such insns,
81 we return the last one. Otherwise, we return the end of BB. */
83 static rtx
84 skip_insns_after_block (bb)
85 basic_block bb;
87 rtx insn, last_insn, next_head, prev;
89 next_head = NULL_RTX;
90 if (bb->next_bb != EXIT_BLOCK_PTR)
91 next_head = bb->next_bb->head;
93 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
95 if (insn == next_head)
96 break;
98 switch (GET_CODE (insn))
100 case BARRIER:
101 last_insn = insn;
102 continue;
104 case NOTE:
105 switch (NOTE_LINE_NUMBER (insn))
107 case NOTE_INSN_LOOP_END:
108 case NOTE_INSN_BLOCK_END:
109 last_insn = insn;
110 continue;
111 case NOTE_INSN_DELETED:
112 case NOTE_INSN_DELETED_LABEL:
113 continue;
115 default:
116 continue;
117 break;
119 break;
121 case CODE_LABEL:
122 if (NEXT_INSN (insn)
123 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
124 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
125 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
127 insn = NEXT_INSN (insn);
128 last_insn = insn;
129 continue;
131 break;
133 default:
134 break;
137 break;
140 /* It is possible to hit contradictory sequence. For instance:
142 jump_insn
143 NOTE_INSN_LOOP_BEG
144 barrier
146 Where barrier belongs to jump_insn, but the note does not. This can be
147 created by removing the basic block originally following
148 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
150 for (insn = last_insn; insn != bb->end; insn = prev)
152 prev = PREV_INSN (insn);
153 if (GET_CODE (insn) == NOTE)
154 switch (NOTE_LINE_NUMBER (insn))
156 case NOTE_INSN_LOOP_END:
157 case NOTE_INSN_BLOCK_END:
158 case NOTE_INSN_DELETED:
159 case NOTE_INSN_DELETED_LABEL:
160 continue;
161 default:
162 reorder_insns (insn, insn, last_insn);
166 return last_insn;
169 /* Locate or create a label for a given basic block. */
171 static rtx
172 label_for_bb (bb)
173 basic_block bb;
175 rtx label = bb->head;
177 if (GET_CODE (label) != CODE_LABEL)
179 if (rtl_dump_file)
180 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
182 label = block_label (bb);
185 return label;
188 /* Locate the effective beginning and end of the insn chain for each
189 block, as defined by skip_insns_after_block above. */
191 static void
192 record_effective_endpoints ()
194 rtx next_insn = get_insns ();
195 basic_block bb;
197 FOR_EACH_BB (bb)
199 rtx end;
201 if (PREV_INSN (bb->head) && next_insn != bb->head)
202 RBI (bb)->header = unlink_insn_chain (next_insn,
203 PREV_INSN (bb->head));
204 end = skip_insns_after_block (bb);
205 if (NEXT_INSN (bb->end) && bb->end != end)
206 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
207 next_insn = NEXT_INSN (bb->end);
210 function_footer = next_insn;
211 if (function_footer)
212 function_footer = unlink_insn_chain (function_footer, get_last_insn ());
215 /* Build a varray mapping INSN_UID to lexical block. Return it. */
217 void
218 scope_to_insns_initialize ()
220 tree block = NULL;
221 rtx insn, next;
223 for (insn = get_insns (); insn; insn = next)
225 next = NEXT_INSN (insn);
227 if (active_insn_p (insn)
228 && GET_CODE (PATTERN (insn)) != ADDR_VEC
229 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
230 INSN_SCOPE (insn) = block;
231 else if (GET_CODE (insn) == NOTE)
233 switch (NOTE_LINE_NUMBER (insn))
235 case NOTE_INSN_BLOCK_BEG:
236 block = NOTE_BLOCK (insn);
237 delete_insn (insn);
238 break;
239 case NOTE_INSN_BLOCK_END:
240 block = BLOCK_SUPERCONTEXT (block);
241 delete_insn (insn);
242 break;
243 default:
244 break;
249 /* Tag the blocks with a depth number so that change_scope can find
250 the common parent easily. */
251 set_block_levels (DECL_INITIAL (cfun->decl), 0);
254 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
255 found in the block tree. */
257 static void
258 set_block_levels (block, level)
259 tree block;
260 int level;
262 while (block)
264 BLOCK_NUMBER (block) = level;
265 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
266 block = BLOCK_CHAIN (block);
270 /* Return sope resulting from combination of S1 and S2. */
271 tree
272 choose_inner_scope (s1, s2)
273 tree s1, s2;
275 if (!s1)
276 return s2;
277 if (!s2)
278 return s1;
279 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
280 return s1;
281 return s2;
284 /* Emit lexical block notes needed to change scope from S1 to S2. */
286 static void
287 change_scope (orig_insn, s1, s2)
288 rtx orig_insn;
289 tree s1, s2;
291 rtx insn = orig_insn;
292 tree com = NULL_TREE;
293 tree ts1 = s1, ts2 = s2;
294 tree s;
296 while (ts1 != ts2)
298 if (ts1 == NULL || ts2 == NULL)
299 abort ();
300 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
301 ts1 = BLOCK_SUPERCONTEXT (ts1);
302 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
303 ts2 = BLOCK_SUPERCONTEXT (ts2);
304 else
306 ts1 = BLOCK_SUPERCONTEXT (ts1);
307 ts2 = BLOCK_SUPERCONTEXT (ts2);
310 com = ts1;
312 /* Close scopes. */
313 s = s1;
314 while (s != com)
316 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
317 NOTE_BLOCK (note) = s;
318 s = BLOCK_SUPERCONTEXT (s);
321 /* Open scopes. */
322 s = s2;
323 while (s != com)
325 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
326 NOTE_BLOCK (insn) = s;
327 s = BLOCK_SUPERCONTEXT (s);
331 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
332 on the scope tree and the newly reordered instructions. */
334 void
335 scope_to_insns_finalize ()
337 tree cur_block = DECL_INITIAL (cfun->decl);
338 rtx insn, note;
340 insn = get_insns ();
341 if (!active_insn_p (insn))
342 insn = next_active_insn (insn);
343 for (; insn; insn = next_active_insn (insn))
345 tree this_block;
347 this_block = INSN_SCOPE (insn);
348 /* For sequences compute scope resulting from merging all scopes
349 of instructions nested inside. */
350 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
352 int i;
353 rtx body = PATTERN (insn);
355 this_block = NULL;
356 for (i = 0; i < XVECLEN (body, 0); i++)
357 this_block = choose_inner_scope (this_block,
358 INSN_SCOPE (XVECEXP (body, 0, i)));
360 if (! this_block)
361 continue;
363 if (this_block != cur_block)
365 change_scope (insn, cur_block, this_block);
366 cur_block = this_block;
370 /* change_scope emits before the insn, not after. */
371 note = emit_note (NULL, NOTE_INSN_DELETED);
372 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
373 delete_insn (note);
375 reorder_blocks ();
378 /* Given a reorder chain, rearrange the code to match. */
380 static void
381 fixup_reorder_chain ()
383 basic_block bb, prev_bb;
384 int index;
385 rtx insn = NULL;
387 /* First do the bulk reordering -- rechain the blocks without regard to
388 the needed changes to jumps and labels. */
390 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
391 bb != 0;
392 bb = RBI (bb)->next, index++)
394 if (RBI (bb)->header)
396 if (insn)
397 NEXT_INSN (insn) = RBI (bb)->header;
398 else
399 set_first_insn (RBI (bb)->header);
400 PREV_INSN (RBI (bb)->header) = insn;
401 insn = RBI (bb)->header;
402 while (NEXT_INSN (insn))
403 insn = NEXT_INSN (insn);
405 if (insn)
406 NEXT_INSN (insn) = bb->head;
407 else
408 set_first_insn (bb->head);
409 PREV_INSN (bb->head) = insn;
410 insn = bb->end;
411 if (RBI (bb)->footer)
413 NEXT_INSN (insn) = RBI (bb)->footer;
414 PREV_INSN (RBI (bb)->footer) = insn;
415 while (NEXT_INSN (insn))
416 insn = NEXT_INSN (insn);
420 if (index != n_basic_blocks)
421 abort ();
423 NEXT_INSN (insn) = function_footer;
424 if (function_footer)
425 PREV_INSN (function_footer) = insn;
427 while (NEXT_INSN (insn))
428 insn = NEXT_INSN (insn);
430 set_last_insn (insn);
431 #ifdef ENABLE_CHECKING
432 verify_insn_chain ();
433 #endif
435 /* Now add jumps and labels as needed to match the blocks new
436 outgoing edges. */
438 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
440 edge e_fall, e_taken, e;
441 rtx bb_end_insn;
442 basic_block nb;
444 if (bb->succ == NULL)
445 continue;
447 /* Find the old fallthru edge, and another non-EH edge for
448 a taken jump. */
449 e_taken = e_fall = NULL;
450 for (e = bb->succ; e ; e = e->succ_next)
451 if (e->flags & EDGE_FALLTHRU)
452 e_fall = e;
453 else if (! (e->flags & EDGE_EH))
454 e_taken = e;
456 bb_end_insn = bb->end;
457 if (GET_CODE (bb_end_insn) == JUMP_INSN)
459 if (any_condjump_p (bb_end_insn))
461 /* If the old fallthru is still next, nothing to do. */
462 if (RBI (bb)->next == e_fall->dest
463 || (!RBI (bb)->next
464 && e_fall->dest == EXIT_BLOCK_PTR))
465 continue;
467 /* There is one special case: if *neither* block is next,
468 such as happens at the very end of a function, then we'll
469 need to add a new unconditional jump. Choose the taken
470 edge based on known or assumed probability. */
471 if (RBI (bb)->next != e_taken->dest)
473 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
475 if (note
476 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
477 && invert_jump (bb_end_insn,
478 label_for_bb (e_fall->dest), 0))
480 e_fall->flags &= ~EDGE_FALLTHRU;
481 e_taken->flags |= EDGE_FALLTHRU;
482 update_br_prob_note (bb);
483 e = e_fall, e_fall = e_taken, e_taken = e;
487 /* Otherwise we can try to invert the jump. This will
488 basically never fail, however, keep up the pretense. */
489 else if (invert_jump (bb_end_insn,
490 label_for_bb (e_fall->dest), 0))
492 e_fall->flags &= ~EDGE_FALLTHRU;
493 e_taken->flags |= EDGE_FALLTHRU;
494 update_br_prob_note (bb);
495 continue;
498 else if (returnjump_p (bb_end_insn))
499 continue;
500 else
502 /* Otherwise we have some switch or computed jump. In the
503 99% case, there should not have been a fallthru edge. */
504 if (! e_fall)
505 continue;
507 #ifdef CASE_DROPS_THROUGH
508 /* Except for VAX. Since we didn't have predication for the
509 tablejump, the fallthru block should not have moved. */
510 if (RBI (bb)->next == e_fall->dest)
511 continue;
512 bb_end_insn = skip_insns_after_block (bb);
513 #else
514 abort ();
515 #endif
518 else
520 /* No fallthru implies a noreturn function with EH edges, or
521 something similarly bizarre. In any case, we don't need to
522 do anything. */
523 if (! e_fall)
524 continue;
526 /* If the fallthru block is still next, nothing to do. */
527 if (RBI (bb)->next == e_fall->dest)
528 continue;
530 /* A fallthru to exit block. */
531 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
532 continue;
535 /* We got here if we need to add a new jump insn. */
536 nb = force_nonfallthru (e_fall);
537 if (nb)
539 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
540 RBI (nb)->visited = 1;
541 RBI (nb)->next = RBI (bb)->next;
542 RBI (bb)->next = nb;
543 /* Don't process this new block. */
544 bb = nb;
548 /* Put basic_block_info in the new order. */
550 if (rtl_dump_file)
552 fprintf (rtl_dump_file, "Reordered sequence:\n");
553 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
555 fprintf (rtl_dump_file, " %i ", index);
556 if (RBI (bb)->original)
557 fprintf (rtl_dump_file, "duplicate of %i ",
558 RBI (bb)->original->index);
559 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
560 fprintf (rtl_dump_file, "compensation ");
561 else
562 fprintf (rtl_dump_file, "bb %i ", bb->index);
563 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
567 prev_bb = ENTRY_BLOCK_PTR;
568 bb = ENTRY_BLOCK_PTR->next_bb;
569 index = 0;
571 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
573 bb->index = index;
574 BASIC_BLOCK (index) = bb;
576 bb->prev_bb = prev_bb;
577 prev_bb->next_bb = bb;
579 prev_bb->next_bb = EXIT_BLOCK_PTR;
580 EXIT_BLOCK_PTR->prev_bb = prev_bb;
583 /* Perform sanity checks on the insn chain.
584 1. Check that next/prev pointers are consistent in both the forward and
585 reverse direction.
586 2. Count insns in chain, going both directions, and check if equal.
587 3. Check that get_last_insn () returns the actual end of chain. */
589 void
590 verify_insn_chain ()
592 rtx x, prevx, nextx;
593 int insn_cnt1, insn_cnt2;
595 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
596 x != 0;
597 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
598 if (PREV_INSN (x) != prevx)
599 abort ();
601 if (prevx != get_last_insn ())
602 abort ();
604 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
605 x != 0;
606 nextx = x, insn_cnt2++, x = PREV_INSN (x))
607 if (NEXT_INSN (x) != nextx)
608 abort ();
610 if (insn_cnt1 != insn_cnt2)
611 abort ();
614 /* Remove any unconditional jumps and forwarder block creating fallthru
615 edges instead. During BB reordering, fallthru edges are not required
616 to target next basic block in the linear CFG layout, so the unconditional
617 jumps are not needed. If LOOPS is not null, also update loop structure &
618 dominators. */
620 static void
621 cleanup_unconditional_jumps (loops)
622 struct loops *loops;
624 basic_block bb;
626 FOR_EACH_BB (bb)
628 if (!bb->succ)
629 continue;
630 if (bb->succ->flags & EDGE_FALLTHRU)
631 continue;
632 if (!bb->succ->succ_next)
634 rtx insn;
635 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
636 && bb->prev_bb != ENTRY_BLOCK_PTR)
638 basic_block prev = bb->prev_bb;
640 if (rtl_dump_file)
641 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
642 bb->index);
644 if (loops)
646 /* bb cannot be loop header, as it only has one entry
647 edge. It could be a loop latch. */
648 if (bb->loop_father->header == bb)
649 abort ();
651 if (bb->loop_father->latch == bb)
652 bb->loop_father->latch = bb->pred->src;
654 if (get_immediate_dominator
655 (loops->cfg.dom, bb->succ->dest) == bb)
656 set_immediate_dominator
657 (loops->cfg.dom, bb->succ->dest, bb->pred->src);
659 remove_bb_from_loops (bb);
660 delete_from_dominance_info (loops->cfg.dom, bb);
663 redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
664 flow_delete_block (bb);
665 bb = prev;
667 else if (simplejump_p (bb->end))
669 rtx jump = bb->end;
671 if (rtl_dump_file)
672 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
673 INSN_UID (jump), bb->index);
674 delete_insn (jump);
675 bb->succ->flags |= EDGE_FALLTHRU;
677 else
678 continue;
680 insn = NEXT_INSN (bb->end);
681 while (insn
682 && (GET_CODE (insn) != NOTE
683 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
685 rtx next = NEXT_INSN (insn);
687 if (GET_CODE (insn) == BARRIER)
688 delete_barrier (insn);
690 insn = next;
696 /* The block falling through to exit must be the last one in the
697 reordered chain. Ensure that this condition is met. */
698 static void
699 fixup_fallthru_exit_predecessor ()
701 edge e;
702 basic_block bb = NULL;
704 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
705 if (e->flags & EDGE_FALLTHRU)
706 bb = e->src;
708 if (bb && RBI (bb)->next)
710 basic_block c = ENTRY_BLOCK_PTR->next_bb;
712 while (RBI (c)->next != bb)
713 c = RBI (c)->next;
715 RBI (c)->next = RBI (bb)->next;
716 while (RBI (c)->next)
717 c = RBI (c)->next;
719 RBI (c)->next = bb;
720 RBI (bb)->next = NULL;
724 /* Return true in case it is possible to duplicate the basic block BB. */
726 bool
727 cfg_layout_can_duplicate_bb_p (bb)
728 basic_block bb;
730 rtx next;
731 edge s;
733 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
734 return false;
736 /* Duplicating fallthru block to exit would require adding a jump
737 and splitting the real last BB. */
738 for (s = bb->succ; s; s = s->succ_next)
739 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
740 return false;
742 /* Do not attempt to duplicate tablejumps, as we need to unshare
743 the dispatch table. This is difficult to do, as the instructions
744 computing jump destination may be hoisted outside the basic block. */
745 if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
746 && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
747 && GET_CODE (next) == JUMP_INSN
748 && (GET_CODE (PATTERN (next)) == ADDR_VEC
749 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
750 return false;
751 return true;
754 static rtx
755 duplicate_insn_chain (from, to)
756 rtx from, to;
758 rtx insn, last;
760 /* Avoid updating of boundaries of previous basic block. The
761 note will get removed from insn stream in fixup. */
762 last = emit_note (NULL, NOTE_INSN_DELETED);
764 /* Create copy at the end of INSN chain. The chain will
765 be reordered later. */
766 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
768 switch (GET_CODE (insn))
770 case INSN:
771 case CALL_INSN:
772 case JUMP_INSN:
773 /* Avoid copying of dispatch tables. We never duplicate
774 tablejumps, so this can hit only in case the table got
775 moved far from original jump. */
776 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
777 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
778 break;
779 emit_copy_of_insn_after (insn, get_last_insn ());
780 break;
782 case CODE_LABEL:
783 break;
785 case BARRIER:
786 emit_barrier ();
787 break;
789 case NOTE:
790 switch (NOTE_LINE_NUMBER (insn))
792 /* In case prologue is empty and function contain label
793 in first BB, we may want to copy the block. */
794 case NOTE_INSN_PROLOGUE_END:
796 case NOTE_INSN_LOOP_VTOP:
797 case NOTE_INSN_LOOP_CONT:
798 case NOTE_INSN_LOOP_BEG:
799 case NOTE_INSN_LOOP_END:
800 /* Strip down the loop notes - we don't really want to keep
801 them consistent in loop copies. */
802 case NOTE_INSN_DELETED:
803 case NOTE_INSN_DELETED_LABEL:
804 /* No problem to strip these. */
805 case NOTE_INSN_EPILOGUE_BEG:
806 case NOTE_INSN_FUNCTION_END:
807 /* Debug code expect these notes to exist just once.
808 Keep them in the master copy.
809 ??? It probably makes more sense to duplicate them for each
810 epilogue copy. */
811 case NOTE_INSN_FUNCTION_BEG:
812 /* There is always just single entry to function. */
813 case NOTE_INSN_BASIC_BLOCK:
814 break;
816 /* There is no purpose to duplicate prologue. */
817 case NOTE_INSN_BLOCK_BEG:
818 case NOTE_INSN_BLOCK_END:
819 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
820 reordering is in the progress. */
821 case NOTE_INSN_EH_REGION_BEG:
822 case NOTE_INSN_EH_REGION_END:
823 /* Should never exist at BB duplication time. */
824 abort ();
825 break;
826 case NOTE_INSN_REPEATED_LINE_NUMBER:
827 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
828 break;
830 default:
831 if (NOTE_LINE_NUMBER (insn) < 0)
832 abort ();
833 /* It is possible that no_line_number is set and the note
834 won't be emitted. */
835 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
837 break;
838 default:
839 abort ();
842 insn = NEXT_INSN (last);
843 delete_insn (last);
844 return insn;
847 /* Redirect Edge to DEST. */
848 bool
849 cfg_layout_redirect_edge (e, dest)
850 edge e;
851 basic_block dest;
853 basic_block src = e->src;
854 basic_block old_next_bb = src->next_bb;
855 bool ret;
857 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
858 in the case the basic block appears to be in sequence. Avoid this
859 transformation. */
861 src->next_bb = NULL;
862 if (e->flags & EDGE_FALLTHRU)
864 /* In case we are redirecting fallthru edge to the branch edge
865 of conditional jump, remove it. */
866 if (src->succ->succ_next
867 && !src->succ->succ_next->succ_next)
869 edge s = e->succ_next ? e->succ_next : src->succ;
870 if (s->dest == dest
871 && any_condjump_p (src->end)
872 && onlyjump_p (src->end))
873 delete_insn (src->end);
875 redirect_edge_succ_nodup (e, dest);
877 ret = true;
879 else
880 ret = redirect_edge_and_branch (e, dest);
882 /* We don't want simplejumps in the insn stream during cfglayout. */
883 if (simplejump_p (src->end))
885 delete_insn (src->end);
886 delete_barrier (NEXT_INSN (src->end));
887 src->succ->flags |= EDGE_FALLTHRU;
889 src->next_bb = old_next_bb;
891 return ret;
894 /* Same as split_block but update cfg_layout structures. */
895 edge
896 cfg_layout_split_block (bb, insn)
897 basic_block bb;
898 rtx insn;
900 edge fallthru = split_block (bb, insn);
902 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
903 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
904 RBI (fallthru->src)->footer = NULL;
905 return fallthru;
908 /* Create a duplicate of the basic block BB and redirect edge E into it. */
910 basic_block
911 cfg_layout_duplicate_bb (bb, e)
912 basic_block bb;
913 edge e;
915 rtx insn;
916 edge s, n;
917 basic_block new_bb;
918 gcov_type new_count = e ? e->count : 0;
920 if (bb->count < new_count)
921 new_count = bb->count;
922 if (!bb->pred)
923 abort ();
924 #ifdef ENABLE_CHECKING
925 if (!cfg_layout_can_duplicate_bb_p (bb))
926 abort ();
927 #endif
929 insn = duplicate_insn_chain (bb->head, bb->end);
930 new_bb = create_basic_block (insn,
931 insn ? get_last_insn () : NULL,
932 EXIT_BLOCK_PTR->prev_bb);
933 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
935 if (RBI (bb)->header)
937 insn = RBI (bb)->header;
938 while (NEXT_INSN (insn))
939 insn = NEXT_INSN (insn);
940 insn = duplicate_insn_chain (RBI (bb)->header, insn);
941 if (insn)
942 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
945 if (RBI (bb)->footer)
947 insn = RBI (bb)->footer;
948 while (NEXT_INSN (insn))
949 insn = NEXT_INSN (insn);
950 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
951 if (insn)
952 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
955 if (bb->global_live_at_start)
957 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
958 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
959 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
960 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
963 new_bb->loop_depth = bb->loop_depth;
964 new_bb->flags = bb->flags;
965 for (s = bb->succ; s; s = s->succ_next)
967 n = make_edge (new_bb, s->dest, s->flags);
968 n->probability = s->probability;
969 if (new_count)
970 /* Take care for overflows! */
971 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
972 else
973 n->count = 0;
974 s->count -= n->count;
977 new_bb->count = new_count;
978 bb->count -= new_count;
980 if (e)
982 new_bb->frequency = EDGE_FREQUENCY (e);
983 bb->frequency -= EDGE_FREQUENCY (e);
985 cfg_layout_redirect_edge (e, new_bb);
988 if (bb->count < 0)
989 bb->count = 0;
990 if (bb->frequency < 0)
991 bb->frequency = 0;
993 RBI (new_bb)->original = bb;
994 RBI (bb)->copy = new_bb;
995 return new_bb;
998 /* Main entry point to this module - initialize the datastructures for
999 CFG layout changes. It keeps LOOPS up-to-date if not null. */
1001 void
1002 cfg_layout_initialize (loops)
1003 struct loops *loops;
1005 /* Our algorithm depends on fact that there are now dead jumptables
1006 around the code. */
1007 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
1009 cleanup_unconditional_jumps (loops);
1011 record_effective_endpoints ();
1014 /* Splits superblocks. */
1015 static void
1016 break_superblocks ()
1018 sbitmap superblocks;
1019 int i, need;
1021 superblocks = sbitmap_alloc (n_basic_blocks);
1022 sbitmap_zero (superblocks);
1024 need = 0;
1026 for (i = 0; i < n_basic_blocks; i++)
1027 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1029 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1030 SET_BIT (superblocks, i);
1031 need = 1;
1034 if (need)
1036 rebuild_jump_labels (get_insns ());
1037 find_many_sub_basic_blocks (superblocks);
1040 free (superblocks);
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  Frees the per-block aux
   (RBI) data and re-verifies the insn chain and flow info when checking
   is enabled.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}