gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 2, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_footer;

static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void cleanup_unconditional_jumps PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
/* Unlink the insn chain from FIRST to LAST, splice the surrounding
   insns back together, and return FIRST.  */

static rtx
unlink_insn_chain (first, last)
     rtx first;
     rtx last;
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
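/* Illustrative sketch, not part of the original sources: given a chain
   A <-> B <-> C <-> D, a caller such as record_effective_endpoints below
   detaches the middle insns with

       rtx chain = unlink_insn_chain (B, C);

   leaving A <-> D in the main stream and returning B, with
   PREV_INSN (B) == NULL and NEXT_INSN (C) == NULL.  A, B, C and D are
   placeholder insn names used only for this example.  */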
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;
            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

        jump_insn
        NOTE_INSN_LOOP_BEG
        barrier

     where the barrier belongs to jump_insn, but the note does not.  This
     can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
      label = block_label (bb);
    }

  return label;
}
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (bb->head) && next_insn != bb->head)
        RBI (bb)->header = unlink_insn_chain (next_insn,
                                              PREV_INSN (bb->head));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
        RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
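/* Illustrative note, not part of the original sources: after this pass,
   every surviving insn outside a block proper lives in one of three
   places, which fixup_reorder_chain later re-emits around the block in
   its new position:

       RBI (bb)->header    insns between the previous block's end and bb->head
       bb->head .. bb->end the block itself, still in the main chain
       RBI (bb)->footer    trailing barriers/notes found by
                           skip_insns_after_block

   with any insns following the last block stashed in function_footer.  */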
/* Record, for each active insn, the lexical block it belongs to in
   INSN_SCOPE, deleting the NOTE_INSN_BLOCK_BEG/END notes as we go.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              delete_insn (insn);
              break;
            default:
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Return the scope resulting from the combination of S1 and S2, i.e. the
   deeper of the two.  */

tree
choose_inner_scope (s1, s2)
     tree s1, s2;
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk up the block tree until TS1 and TS2 meet at the common
     ancestor scope COM.  */
  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      /* For sequences, compute the scope resulting from merging the
         scopes of all instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             INSN_SCOPE (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
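/* Usage sketch (illustrative only, not part of this file): a pass that
   reorders insns while the BLOCK notes are stripped would bracket its
   work roughly as

       scope_to_insns_initialize ();
       ... reorder or duplicate insns; INSN_SCOPE carries the scope ...
       scope_to_insns_finalize ();

   so that the NOTE_INSN_BLOCK_BEG/END notes are re-emitted to match the
   new insn order.  The basic block reordering pass is assumed to be the
   typical caller.  */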
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = RBI (bb)->next, index++)
    {
      if (RBI (bb)->header)
        {
          if (insn)
            NEXT_INSN (insn) = RBI (bb)->header;
          else
            set_first_insn (RBI (bb)->header);
          PREV_INSN (RBI (bb)->header) = insn;
          insn = RBI (bb)->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = bb->head;
      else
        set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (RBI (bb)->footer)
        {
          NEXT_INSN (insn) = RBI (bb)->footer;
          PREV_INSN (RBI (bb)->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = function_footer;
  if (function_footer)
    PREV_INSN (function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (RBI (bb)->next == e_fall->dest
                  || (!RBI (bb)->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              if (RBI (bb)->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (RBI (bb)->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (RBI (bb)->next == e_fall->dest)
            continue;

          /* A fallthru to the exit block.  */
          if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
          RBI (nb)->visited = 1;
          RBI (nb)->next = RBI (bb)->next;
          RBI (bb)->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }
  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb;
           bb = RBI (bb)->next, index++)
        {
          fprintf (rtl_dump_file, " %i ", index);
          if (RBI (bb)->original)
            fprintf (rtl_dump_file, "duplicate of %i ",
                     RBI (bb)->original->index);
          else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
            fprintf (rtl_dump_file, "compensation ");
          else
            fprintf (rtl_dump_file, "bb %i ", bb->index);
          fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = RBI (bb)->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}
/* Remove any unconditional jumps and forwarder blocks, creating fallthru
   edges instead.  During BB reordering, fallthru edges are not required
   to target the next basic block in the linear CFG layout, so the
   unconditional jumps are not needed.  */

static void
cleanup_unconditional_jumps ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
        continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
        continue;
      if (!bb->succ->succ_next)
        {
          rtx insn;

          if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
              && bb->prev_bb != ENTRY_BLOCK_PTR)
            {
              basic_block prev = bb->prev_bb;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
                         bb->index);

              redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
              flow_delete_block (bb);
              bb = prev;
            }
          else if (simplejump_p (bb->end))
            {
              rtx jump = bb->end;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
                         INSN_UID (jump), bb->index);
              delete_insn (jump);
              bb->succ->flags |= EDGE_FALLTHRU;
            }
          else
            continue;

          /* Delete any barriers trailing the removed jump or block, up to
             the next basic block note.  */
          insn = NEXT_INSN (bb->end);
          while (insn
                 && (GET_CODE (insn) != NOTE
                     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
            {
              rtx next = NEXT_INSN (insn);

              if (GET_CODE (insn) == BARRIER)
                delete_barrier (insn);

              insn = next;
            }
        }
    }
}
/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */

static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* Unlink BB and move it to the end of the chain.  */
      while (RBI (c)->next != bb)
        c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
        c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (bb)
     basic_block bb;
{
  rtx next;
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to the exit block would require adding
     a jump and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
      && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
      && GET_CODE (next) == JUMP_INSN
      && (GET_CODE (PATTERN (next)) == ADDR_VEC
          || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
    return false;

  return true;
}
/* Create a copy of the insn chain FROM .. TO at the end of the insn
   stream and return the first insn of the copy.  */

static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      rtx new;

      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          new = emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */

            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */

            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose to duplicate the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while BB
                 reordering is in progress.  */

            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;

            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
            }
          break;

        default:
          abort ();
        }
    }

  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Redirect edge E to DEST.  */

void
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;

  /* redirect_edge_and_branch may decide to turn the branch into a fallthru
     edge in case the basic blocks appear to be in sequence.  Avoid this
     transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* In case we are redirecting the fallthru edge to the branch edge
         of a conditional jump, remove the jump.  */
      if (src->succ->succ_next
          && !src->succ->succ_next->succ_next)
        {
          edge s = e->succ_next ? e->succ_next : src->succ;

          if (s->dest == dest
              && any_condjump_p (src->end)
              && onlyjump_p (src->end))
            delete_insn (src->end);
        }
      redirect_edge_succ_nodup (e, dest);
    }
  else
    redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;
}
/* Create a duplicate of the basic block BB and redirect edge E into it.  */

basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));

  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
        RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
        RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      n = make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
        /* Take care for overflows!  */
        n->count = s->count * (new_count * 10000 / bb->count) / 10000;
      else
        n->count = 0;
      s->count -= n->count;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  RBI (new_bb)->original = bb;
  return new_bb;
}
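/* Usage sketch (illustrative only, not part of this file): a transformation
   that wants its own copy of BB reached through edge E would typically do

       if (cfg_layout_can_duplicate_bb_p (bb))
         {
           basic_block copy = cfg_layout_duplicate_bb (bb, e);
           /* E now enters COPY; profile counts and frequencies were
              split between BB and COPY above.  */
         }

   BB, E and COPY are placeholder names for this example; real callers in
   the compiler have their own surrounding bookkeeping.  */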
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.  */

void
cfg_layout_initialize ()
{
  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps ();

  record_effective_endpoints ();
}
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
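/* Overall usage sketch (illustrative only, not part of this file): a pass
   built on this module is assumed to look roughly like

       cfg_layout_initialize ();
       FOR_EACH_BB (bb)
         {
           /* Decide the new order by filling in RBI (bb)->next, and
              duplicate blocks with cfg_layout_duplicate_bb as needed.  */
         }
       cfg_layout_finalize ();

   cfg_layout_finalize then materializes the chosen RBI (bb)->next order
   in the insn stream via fixup_reorder_chain.  The basic block reordering
   pass is the expected caller, though its actual driver code may differ
   from this sketch.  */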