gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 2, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_footer;

static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void cleanup_unconditional_jumps PARAMS ((void));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
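
/* Unlink the insns from FIRST to LAST (inclusive) out of the global insn
   chain, reconnecting their former neighbours and updating the first/last
   insn pointers when an end of the function is removed.  The detached
   FIRST ... LAST sub-chain is returned.  For example, unlinking B ... C
   from the chain A <-> B <-> C <-> D leaves A <-> D in the main chain.  */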
static rtx
unlink_insn_chain (first, last)
     rtx first;
     rtx last;
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
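
/* The trailing insns located by the function below -- the BARRIER after an
   unconditional jump, NOTE_INSN_LOOP_END / NOTE_INSN_BLOCK_END notes, and
   the ADDR_VEC or ADDR_DIFF_VEC dispatch table following a tablejump's
   label -- are stashed away as the block's footer by
   record_effective_endpoints.  */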
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to the jump_insn, but the note does not.
     This can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}
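
/* While in cfglayout mode, the insns surrounding a basic block are kept in
   two side chains: RBI (bb)->header holds the insns found between the
   previous block and bb->head, and RBI (bb)->footer holds the trailing
   insns located by skip_insns_after_block.  Both are unlinked from the
   main chain here and spliced back in by fixup_reorder_chain.  */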
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (bb->head) && next_insn != bb->head)
        RBI (bb)->header = unlink_insn_chain (next_insn,
                                              PREV_INSN (bb->head));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
        RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
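
/* The two scope_to_insns_* routines below let the lexical block notes be
   regenerated after reordering: the initializer records the enclosing
   lexical block of every active insn in INSN_SCOPE and deletes the
   NOTE_INSN_BLOCK_BEG / NOTE_INSN_BLOCK_END notes, and the finalizer
   re-emits a consistent set of notes for the new insn order.  */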
/* Record, in INSN_SCOPE, the lexical block to which each insn belongs, and
   remove the lexical block notes from the insn stream.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              delete_insn (insn);
              break;
            default:
              break;
            }
        }
    }
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
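
/* A worked example for the function below: if S1 sits at depth 3, S2 at
   depth 1, and S2 is itself the closest common ancestor, change_scope
   emits two NOTE_INSN_BLOCK_END notes (closing the two extra levels of S1)
   and no NOTE_INSN_BLOCK_BEG notes; if the common ancestor lies above S2,
   BLOCK_BEG notes are also emitted to re-open the blocks down to S2.  */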
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (cur_block, 0);

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
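
/* The desired block order is recorded as a linked list threaded through
   RBI (bb)->next by the reordering pass.  fixup_reorder_chain below first
   rewires the insn chain (including the stashed headers and footers) to
   follow that list, then adds or inverts jumps so that every fallthru edge
   really reaches the physically next block, and finally renumbers
   basic_block_info to match.  */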
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = RBI (bb)->next, index++)
    {
      if (RBI (bb)->header)
        {
          if (insn)
            NEXT_INSN (insn) = RBI (bb)->header;
          else
            set_first_insn (RBI (bb)->header);
          PREV_INSN (RBI (bb)->header) = insn;
          insn = RBI (bb)->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = bb->head;
      else
        set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (RBI (bb)->footer)
        {
          NEXT_INSN (insn) = RBI (bb)->footer;
          PREV_INSN (RBI (bb)->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = function_footer;
  if (function_footer)
    PREV_INSN (function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (RBI (bb)->next == e_fall->dest
                  || (!RBI (bb)->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              if (RBI (bb)->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail; however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (RBI (bb)->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (RBI (bb)->next == e_fall->dest)
            continue;

          /* A fallthru to the exit block.  */
          if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
          RBI (nb)->visited = 1;
          RBI (nb)->next = RBI (bb)->next;
          RBI (bb)->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }

  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb;
           bb = RBI (bb)->next, index++)
        {
          fprintf (rtl_dump_file, " %i ", index);
          if (RBI (bb)->original)
            fprintf (rtl_dump_file, "duplicate of %i ",
                     RBI (bb)->original->index);
          else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
            fprintf (rtl_dump_file, "compensation ");
          else
            fprintf (rtl_dump_file, "bb %i ", bb->index);
          fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = RBI (bb)->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}
/* Remove any unconditional jumps and forwarder blocks, creating fallthru
   edges instead.  During BB reordering, fallthru edges are not required
   to target the next basic block in the linear CFG layout, so the
   unconditional jumps are not needed.  */

static void
cleanup_unconditional_jumps ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
        continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
        continue;
      if (!bb->succ->succ_next)
        {
          rtx insn;
          if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
              && bb->prev_bb != ENTRY_BLOCK_PTR)
            {
              basic_block prev = bb->prev_bb;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
                         bb->index);

              redirect_edge_succ (bb->pred, bb->succ->dest);
              flow_delete_block (bb);
              bb = prev;
            }
          else if (simplejump_p (bb->end))
            {
              rtx jump = bb->end;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
                         INSN_UID (jump), bb->index);
              delete_insn (jump);
              bb->succ->flags |= EDGE_FALLTHRU;
            }
          else
            continue;

          /* Clean up barriers and delete ADDR_VECs, as they belong to the
             removed tablejump anyway.  */
          insn = NEXT_INSN (bb->end);
          while (insn
                 && (GET_CODE (insn) != NOTE
                     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
            {
              rtx next = NEXT_INSN (insn);

              if (GET_CODE (insn) == BARRIER)
                delete_barrier (insn);
              else if (GET_CODE (insn) == JUMP_INSN)
                delete_insn_chain (PREV_INSN (insn), insn);
              else if (GET_CODE (insn) == CODE_LABEL)
                ;
              else if (GET_CODE (insn) != NOTE)
                abort ();

              insn = next;
            }
        }
    }
}
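
/* fixup_reorder_chain only tolerates a fallthru edge into the exit block
   when its source is the last block of the chain, so the function below
   moves that block to the end of the RBI chain ahead of time.  */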
/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (RBI (c)->next != bb)
        c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
        c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (bb)
     basic_block bb;
{
  rtx next;
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to the exit block would require adding
     a jump and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
      && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
      && GET_CODE (next) == JUMP_INSN
      && (GET_CODE (PATTERN (next)) == ADDR_VEC
          || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
    return false;
  return true;
}
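
/* Create a copy of the insns from FROM to TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  Dispatch
   tables, code labels and most notes are deliberately not copied;
   cfg_layout_duplicate_bb below turns the copy into a new basic block.  */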
static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in the fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      rtx new;
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          new = emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in its first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose in duplicating the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while
                 BB reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
            case NOTE_INSN_RANGE_BEG:
            case NOTE_INSN_RANGE_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Redirect edge E to DEST.  */
void
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;

  /* redirect_edge_and_branch may decide to turn the branch into a fallthru
     edge in the case the basic blocks appear to be in sequence.  Avoid this
     transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* In case we are redirecting a fallthru edge to the branch edge
         of a conditional jump, remove the jump.  */
      if (src->succ->succ_next
          && !src->succ->succ_next->succ_next)
        {
          edge s = e->succ_next ? e->succ_next : src->succ;
          if (s->dest == dest
              && any_condjump_p (src->end)
              && onlyjump_p (src->end))
            delete_insn (src->end);
        }
      redirect_edge_succ_nodup (e, dest);
    }
  else
    redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;
}
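
/* When a block is duplicated, its execution count is split between the
   original and the copy: the copy receives E->count (capped at bb->count)
   and each outgoing edge of the copy gets a proportional share, computed
   in fixed point as s->count * (new_count * 10000 / bb->count) / 10000 so
   that the intermediate product stays within gcov_type range.  For
   example, duplicating a block with count 1000 along an edge with count
   250 gives the copy count 250 and leaves 750 on the original block.  */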
/* Create a duplicate of the basic block BB and redirect edge E into it.  */

basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));

  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
        RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
        RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      n = make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
        /* Take care of overflows!  */
        n->count = s->count * (new_count * 10000 / bb->count) / 10000;
      else
        n->count = 0;
      s->count -= n->count;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  RBI (new_bb)->original = bb;
  return new_bb;
}
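
/* A sketch of how the entry points of this module are meant to be used by
   a reordering pass (illustrative only; the actual call sites live in the
   callers, e.g. the basic block reordering pass):

     cfg_layout_initialize ();
     ... decide on a new block order by filling in RBI (bb)->next ...
     cfg_layout_finalize ();

   cfg_layout_initialize allocates the per-block reorder_block_def data,
   removes unconditional jumps, and records the effective block endpoints;
   cfg_layout_finalize then re-materializes the insn stream in the chosen
   order and releases the auxiliary data.  */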
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  */

void
cfg_layout_initialize ()
{
  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps ();

  record_effective_endpoints ();
}
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}