/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
static rtx function_footer;

static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void cleanup_unconditional_jumps PARAMS ((struct loops *));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
static void break_superblocks PARAMS ((void));
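
/* Unlink the insns from FIRST to LAST, inclusive, from the insn chain,
   splicing their neighbours together and updating the first/last insn
   pointers when the removed run touches either end of the chain.  The
   detached sub-chain, NULL-terminated on both sides, is returned.  */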
static rtx
unlink_insn_chain (first, last)
     rtx first;
     rtx last;
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Here the barrier belongs to jump_insn, but the note does not.  This
     can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */

  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (bb)
     basic_block bb;
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;
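
  /* Insns between the previous block's recorded end and this block's head
     are detached into RBI (bb)->header; barriers and notes trailing the
     block become RBI (bb)->footer.  Whatever follows the last block ends
     up in function_footer.  */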
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (bb->head) && next_insn != bb->head)
        RBI (bb)->header = unlink_insn_chain (next_insn,
                                              PREV_INSN (bb->head));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
        RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}

/* Record in INSN_SCOPE the lexical block each active insn belongs to,
   deleting the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes as they
   are consumed.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (active_insn_p (insn)
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              delete_insn (insn);
              break;
            default:
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (block, level)
     tree block;
     int level;
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the innermost scope resulting from the combination of S1
   and S2.  */
tree
choose_inner_scope (s1, s2)
     tree s1, s2;
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;
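
  /* Find the common ancestor scope: walk up from both scopes toward the
     root, always ascending from the deeper one, using the depth numbers
     recorded by set_block_levels.  */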
  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             INSN_SCOPE (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain ()
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */
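
  /* Walk the blocks in RBI (bb)->next order, splicing each block's
     detached header and footer (recorded by record_effective_endpoints)
     back around it as the insn chain is rebuilt.  */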
  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = RBI (bb)->next, index++)
    {
      if (RBI (bb)->header)
        {
          if (insn)
            NEXT_INSN (insn) = RBI (bb)->header;
          else
            set_first_insn (RBI (bb)->header);
          PREV_INSN (RBI (bb)->header) = insn;
          insn = RBI (bb)->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = bb->head;
      else
        set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (RBI (bb)->footer)
        {
          NEXT_INSN (insn) = RBI (bb)->footer;
          PREV_INSN (RBI (bb)->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = function_footer;
  if (function_footer)
    PREV_INSN (function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (RBI (bb)->next == e_fall->dest
                  || (!RBI (bb)->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              if (RBI (bb)->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (RBI (bb)->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (RBI (bb)->next == e_fall->dest)
            continue;

          /* A fallthru to the exit block.  */
          if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
          RBI (nb)->visited = 1;
          RBI (nb)->next = RBI (bb)->next;
          RBI (bb)->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }

  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb;
           bb = RBI (bb)->next, index++)
        {
          fprintf (rtl_dump_file, " %i ", index);
          if (RBI (bb)->original)
            fprintf (rtl_dump_file, "duplicate of %i ",
                     RBI (bb)->original->index);
          else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
            fprintf (rtl_dump_file, "compensation ");
          else
            fprintf (rtl_dump_file, "bb %i ", bb->index);
          fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = RBI (bb)->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain ()
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* Remove any unconditional jumps and forwarder blocks, creating fallthru
   edges instead.  During BB reordering, fallthru edges are not required
   to target the next basic block in the linear CFG layout, so the
   unconditional jumps are not needed.  If LOOPS is not null, also update
   the loop structure and dominators.  */

static void
cleanup_unconditional_jumps (loops)
     struct loops *loops;
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
        continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
        continue;
      if (!bb->succ->succ_next)
        {
          rtx insn;
          if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
              && bb->prev_bb != ENTRY_BLOCK_PTR)
            {
              basic_block prev = bb->prev_bb;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
                         bb->index);

              if (loops)
                {
                  /* bb cannot be a loop header, as it only has one entry
                     edge.  It could be a loop latch.  */
                  if (bb->loop_father->header == bb)
                    abort ();

                  if (bb->loop_father->latch == bb)
                    bb->loop_father->latch = bb->pred->src;

                  if (get_immediate_dominator
                      (loops->cfg.dom, bb->succ->dest) == bb)
                    set_immediate_dominator
                      (loops->cfg.dom, bb->succ->dest, bb->pred->src);

                  remove_bb_from_loops (bb);
                  delete_from_dominance_info (loops->cfg.dom, bb);
                }

              redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
              flow_delete_block (bb);
              bb = prev;
            }
          else if (simplejump_p (bb->end))
            {
              rtx jump = bb->end;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
                         INSN_UID (jump), bb->index);
              delete_insn (jump);
              bb->succ->flags |= EDGE_FALLTHRU;
            }
          else
            continue;
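
          /* Delete any barriers left between this block and the start of
             the next one, now that the edge really falls through.  */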
          insn = NEXT_INSN (bb->end);
          while (insn
                 && (GET_CODE (insn) != NOTE
                     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
            {
              rtx next = NEXT_INSN (insn);

              if (GET_CODE (insn) == BARRIER)
                delete_barrier (insn);

              insn = next;
            }
        }
    }
}

/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;
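
  /* If the fallthru predecessor is not already last in the reorder chain,
     unlink it and append it after the current last block.  */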
  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (RBI (c)->next != bb)
        c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      while (RBI (c)->next)
        c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (bb)
     basic_block bb;
{
  rtx next;
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to the exit block would require adding
     a jump and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
      && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
      && GET_CODE (next) == JUMP_INSN
      && (GET_CODE (PATTERN (next)) == ADDR_VEC
          || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = bb->head;
      while (1)
        {
          if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
            return false;
          if (insn == bb->end)
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}
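
/* Create a copy of the insn chain FROM ... TO, inclusive, at the end of
   the current insn stream and return the first insn of the copy (NULL if
   nothing was copied).  Dispatch tables and most notes are deliberately
   not copied; see the individual cases below.  */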
static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose in duplicating the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while
                 BB reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Redirect edge E to DEST.  */
bool
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;
  bool ret;

  /* Redirect_edge_and_branch may decide to turn the branch into a fallthru
     edge in the case the basic block appears to be in sequence.  Avoid
     this transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* In case we are redirecting the fallthru edge to the branch edge
         of a conditional jump, remove it.  */
      if (src->succ->succ_next
          && !src->succ->succ_next->succ_next)
        {
          edge s = e->succ_next ? e->succ_next : src->succ;
          if (s->dest == dest
              && any_condjump_p (src->end)
              && onlyjump_p (src->end))
            delete_insn (src->end);
        }
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;

  return ret;
}

/* Same as split_block but update cfg_layout structures.  */
edge
cfg_layout_split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  edge fallthru = split_block (bb, insn);

  alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
  RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
  RBI (fallthru->src)->footer = NULL;
  return fallthru;
}

/* Create a duplicate of the basic block BB and redirect edge E into it.  */

basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));

  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
        RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
        RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
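
  /* Copy the outgoing edges.  Each new edge receives roughly the fraction
     new_count / bb->count of the original edge's profile count; the ratio
     is computed with an intermediate factor of 10000 to limit gcov_type
     overflow.  */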
  for (s = bb->succ; s; s = s->succ_next)
    {
      n = make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
        /* Take care for overflows!  */
        n->count = s->count * (new_count * 10000 / bb->count) / 10000;
      else
        n->count = 0;
      s->count -= n->count;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  RBI (new_bb)->original = bb;
  RBI (bb)->copy = new_bb;
  return new_bb;
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.  */

void
cfg_layout_initialize (loops)
     struct loops *loops;
{
  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps (loops);

  record_effective_endpoints ();
}

/* Splits superblocks.  */
static void
break_superblocks ()
{
  sbitmap superblocks;
  int i, need;

  superblocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (superblocks);

  need = 0;

  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->flags & BB_SUPERBLOCK)
      {
        BASIC_BLOCK (i)->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, i);
        need = 1;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}