/* Provenance: gcc/cfglayout.c from official-gcc.git,
   blob b5be7ca29b32f67a498149e8761cb091607f60bb; most recent ChangeLog
   entry shown: 2003-04-02 Aldy Hernandez <aldyh@redhat.com>.  */
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
30 #include "output.h"
31 #include "function.h"
32 #include "obstack.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
37 /* The contents of the current function definition are allocated
38 in this obstack, and all are freed at the end of the function. */
39 extern struct obstack flow_obstack;
41 /* Holds the interesting trailing notes for the function. */
42 static rtx function_footer;
44 static rtx skip_insns_after_block PARAMS ((basic_block));
45 static void record_effective_endpoints PARAMS ((void));
46 static rtx label_for_bb PARAMS ((basic_block));
47 static void fixup_reorder_chain PARAMS ((void));
49 static void set_block_levels PARAMS ((tree, int));
50 static void change_scope PARAMS ((rtx, tree, tree));
52 void verify_insn_chain PARAMS ((void));
53 static void cleanup_unconditional_jumps PARAMS ((struct loops *));
54 static void fixup_fallthru_exit_predecessor PARAMS ((void));
55 static rtx unlink_insn_chain PARAMS ((rtx, rtx));
56 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
57 static void break_superblocks PARAMS ((void));
59 static rtx
60 unlink_insn_chain (first, last)
61 rtx first;
62 rtx last;
64 rtx prevfirst = PREV_INSN (first);
65 rtx nextlast = NEXT_INSN (last);
67 PREV_INSN (first) = NULL;
68 NEXT_INSN (last) = NULL;
69 if (prevfirst)
70 NEXT_INSN (prevfirst) = nextlast;
71 if (nextlast)
72 PREV_INSN (nextlast) = prevfirst;
73 else
74 set_last_insn (prevfirst);
75 if (!prevfirst)
76 set_first_insn (nextlast);
77 return first;
80 /* Skip over inter-block insns occurring after BB which are typically
81 associated with BB (e.g., barriers). If there are any such insns,
82 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): brace-only and blank lines appear to have been stripped
   by extraction throughout this file — compare against upstream
   gcc/cfglayout.c before editing code.  */
84 static rtx
85 skip_insns_after_block (bb)
86 basic_block bb;
88 rtx insn, last_insn, next_head, prev;
/* Never scan past the head of the next block (if any).  */
90 next_head = NULL_RTX;
91 if (bb->next_bb != EXIT_BLOCK_PTR)
92 next_head = bb->next_bb->head;
/* Walk forward from BB's end; LAST_INSN tracks the last insn we decided
   still "belongs" to BB.  */
94 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
96 if (insn == next_head)
97 break;
99 switch (GET_CODE (insn))
101 case BARRIER:
102 last_insn = insn;
103 continue;
105 case NOTE:
106 switch (NOTE_LINE_NUMBER (insn))
108 case NOTE_INSN_LOOP_END:
109 case NOTE_INSN_BLOCK_END:
110 last_insn = insn;
111 continue;
112 case NOTE_INSN_DELETED:
113 case NOTE_INSN_DELETED_LABEL:
/* Skippable, but do not claim them for BB.  */
114 continue;
116 default:
117 continue;
118 break;
120 break;
122 case CODE_LABEL:
/* A label immediately followed by a dispatch table belongs with the
   jump that uses it; absorb both.  */
123 if (NEXT_INSN (insn)
124 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
125 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
126 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
128 insn = NEXT_INSN (insn);
129 last_insn = insn;
130 continue;
132 break;
134 default:
135 break;
/* Any other insn terminates the scan.  */
138 break;
141 /* It is possible to hit contradictory sequence. For instance:
143 jump_insn
144 NOTE_INSN_LOOP_BEG
145 barrier
147 Where barrier belongs to jump_insn, but the note does not. This can be
148 created by removing the basic block originally following
149 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Walk backwards from LAST_INSN and move any insn that is not a
   BB-owned note/barrier to after LAST_INSN.  */
151 for (insn = last_insn; insn != bb->end; insn = prev)
153 prev = PREV_INSN (insn);
154 if (GET_CODE (insn) == NOTE)
155 switch (NOTE_LINE_NUMBER (insn))
157 case NOTE_INSN_LOOP_END:
158 case NOTE_INSN_BLOCK_END:
159 case NOTE_INSN_DELETED:
160 case NOTE_INSN_DELETED_LABEL:
161 continue;
162 default:
163 reorder_insns (insn, insn, last_insn);
167 return last_insn;
170 /* Locate or create a label for a given basic block. */
172 static rtx
173 label_for_bb (bb)
174 basic_block bb;
176 rtx label = bb->head;
178 if (GET_CODE (label) != CODE_LABEL)
180 if (rtl_dump_file)
181 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
183 label = block_label (bb);
186 return label;
189 /* Locate the effective beginning and end of the insn chain for each
190 block, as defined by skip_insns_after_block above. */
/* Inter-block insns are unlinked from the main chain and stashed as
   per-block "header"/"footer" chains (in reorder_block_def), plus a
   file-scope function_footer for trailing insns after the last block.
   fixup_reorder_chain later re-links them in the new order.  */
192 static void
193 record_effective_endpoints ()
195 rtx next_insn = get_insns ();
196 basic_block bb;
198 FOR_EACH_BB (bb)
200 rtx end;
/* Insns between the previous block's effective end and this block's
   head become this block's header.  */
202 if (PREV_INSN (bb->head) && next_insn != bb->head)
203 RBI (bb)->header = unlink_insn_chain (next_insn,
204 PREV_INSN (bb->head));
/* Trailing insns associated with BB (barriers, dispatch tables, some
   notes) become its footer.  */
205 end = skip_insns_after_block (bb);
206 if (NEXT_INSN (bb->end) && bb->end != end)
207 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
208 next_insn = NEXT_INSN (bb->end);
/* Whatever remains after the last block is the function footer.  */
211 function_footer = next_insn;
212 if (function_footer)
213 function_footer = unlink_insn_chain (function_footer, get_last_insn ());
216 /* Record in INSN_SCOPE the lexical block containing each active insn,
   consuming (deleting) the NOTE_INSN_BLOCK_BEG/END notes along the way,
   then number the block tree by depth so change_scope can later find
   common ancestors.  (The previous "build a varray" comment was stale.)  */
218 void
219 scope_to_insns_initialize ()
221 tree block = NULL;
222 rtx insn, next;
224 for (insn = get_insns (); insn; insn = next)
226 next = NEXT_INSN (insn);
/* Dispatch tables (ADDR_VEC/ADDR_DIFF_VEC) are not given a scope.  */
228 if (active_insn_p (insn)
229 && GET_CODE (PATTERN (insn)) != ADDR_VEC
230 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
231 INSN_SCOPE (insn) = block;
232 else if (GET_CODE (insn) == NOTE)
234 switch (NOTE_LINE_NUMBER (insn))
236 case NOTE_INSN_BLOCK_BEG:
/* Enter the scope; the note itself is no longer needed.  */
237 block = NOTE_BLOCK (insn);
238 delete_insn (insn);
239 break;
240 case NOTE_INSN_BLOCK_END:
/* Leave the scope.  */
241 block = BLOCK_SUPERCONTEXT (block);
242 delete_insn (insn);
243 break;
244 default:
245 break;
250 /* Tag the blocks with a depth number so that change_scope can find
251 the common parent easily. */
252 set_block_levels (DECL_INITIAL (cfun->decl), 0);
255 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
256 found in the block tree. */
258 static void
259 set_block_levels (block, level)
260 tree block;
261 int level;
263 while (block)
265 BLOCK_NUMBER (block) = level;
266 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
267 block = BLOCK_CHAIN (block);
271 /* Return sope resulting from combination of S1 and S2. */
272 tree
273 choose_inner_scope (s1, s2)
274 tree s1, s2;
276 if (!s1)
277 return s2;
278 if (!s2)
279 return s1;
280 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
281 return s1;
282 return s2;
285 /* Emit lexical block notes needed to change scope from S1 to S2. */
287 static void
288 change_scope (orig_insn, s1, s2)
289 rtx orig_insn;
290 tree s1, s2;
292 rtx insn = orig_insn;
293 tree com = NULL_TREE;
294 tree ts1 = s1, ts2 = s2;
295 tree s;
297 while (ts1 != ts2)
299 if (ts1 == NULL || ts2 == NULL)
300 abort ();
301 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
302 ts1 = BLOCK_SUPERCONTEXT (ts1);
303 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
304 ts2 = BLOCK_SUPERCONTEXT (ts2);
305 else
307 ts1 = BLOCK_SUPERCONTEXT (ts1);
308 ts2 = BLOCK_SUPERCONTEXT (ts2);
311 com = ts1;
313 /* Close scopes. */
314 s = s1;
315 while (s != com)
317 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
318 NOTE_BLOCK (note) = s;
319 s = BLOCK_SUPERCONTEXT (s);
322 /* Open scopes. */
323 s = s2;
324 while (s != com)
326 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
327 NOTE_BLOCK (insn) = s;
328 s = BLOCK_SUPERCONTEXT (s);
332 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
333 on the scope tree and the newly reordered instructions. */
335 void
336 scope_to_insns_finalize ()
338 tree cur_block = DECL_INITIAL (cfun->decl);
339 rtx insn, note;
/* Start from the first active insn; CUR_BLOCK tracks the scope we are
   currently "inside" as we sweep the reordered insn stream.  */
341 insn = get_insns ();
342 if (!active_insn_p (insn))
343 insn = next_active_insn (insn);
344 for (; insn; insn = next_active_insn (insn))
346 tree this_block;
348 this_block = INSN_SCOPE (insn);
349 /* For sequences compute scope resulting from merging all scopes
350 of instructions nested inside. */
351 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
353 int i;
354 rtx body = PATTERN (insn);
356 this_block = NULL;
357 for (i = 0; i < XVECLEN (body, 0); i++)
358 this_block = choose_inner_scope (this_block,
359 INSN_SCOPE (XVECEXP (body, 0, i)));
361 if (! this_block)
362 continue;
/* Emit the BEG/END notes needed to move from CUR_BLOCK to the insn's
   scope, just before the insn.  */
364 if (this_block != cur_block)
366 change_scope (insn, cur_block, this_block);
367 cur_block = this_block;
371 /* change_scope emits before the insn, not after. */
/* So close any still-open scopes before a temporary note at the end of
   the stream, then drop the placeholder.  */
372 note = emit_note (NULL, NOTE_INSN_DELETED);
373 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
374 delete_insn (note);
376 reorder_blocks ();
379 /* Given a reorder chain, rearrange the code to match. */
/* Three phases: (1) re-link the insn chain following the RBI()->next
   order, splicing each block's saved header/footer back in; (2) fix up
   jumps/labels so control flow matches the new physical order, adding
   compensation jumps where a former fallthru is no longer adjacent;
   (3) renumber basic_block_info and the prev_bb/next_bb links.  */
381 static void
382 fixup_reorder_chain ()
384 basic_block bb, prev_bb;
385 int index;
386 rtx insn = NULL;
388 /* First do the bulk reordering -- rechain the blocks without regard to
389 the needed changes to jumps and labels. */
391 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
392 bb != 0;
393 bb = RBI (bb)->next, index++)
395 if (RBI (bb)->header)
397 if (insn)
398 NEXT_INSN (insn) = RBI (bb)->header;
399 else
400 set_first_insn (RBI (bb)->header);
401 PREV_INSN (RBI (bb)->header) = insn;
402 insn = RBI (bb)->header;
403 while (NEXT_INSN (insn))
404 insn = NEXT_INSN (insn);
406 if (insn)
407 NEXT_INSN (insn) = bb->head;
408 else
409 set_first_insn (bb->head);
410 PREV_INSN (bb->head) = insn;
411 insn = bb->end;
412 if (RBI (bb)->footer)
414 NEXT_INSN (insn) = RBI (bb)->footer;
415 PREV_INSN (RBI (bb)->footer) = insn;
416 while (NEXT_INSN (insn))
417 insn = NEXT_INSN (insn);
/* Every block must appear exactly once in the reorder chain.  */
421 if (index != n_basic_blocks)
422 abort ();
/* Re-append the saved function footer and fix the chain's tail.  */
424 NEXT_INSN (insn) = function_footer;
425 if (function_footer)
426 PREV_INSN (function_footer) = insn;
428 while (NEXT_INSN (insn))
429 insn = NEXT_INSN (insn);
431 set_last_insn (insn);
432 #ifdef ENABLE_CHECKING
433 verify_insn_chain ();
434 #endif
436 /* Now add jumps and labels as needed to match the blocks new
437 outgoing edges. */
439 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
441 edge e_fall, e_taken, e;
442 rtx bb_end_insn;
443 basic_block nb;
445 if (bb->succ == NULL)
446 continue;
448 /* Find the old fallthru edge, and another non-EH edge for
449 a taken jump. */
450 e_taken = e_fall = NULL;
451 for (e = bb->succ; e ; e = e->succ_next)
452 if (e->flags & EDGE_FALLTHRU)
453 e_fall = e;
454 else if (! (e->flags & EDGE_EH))
455 e_taken = e;
457 bb_end_insn = bb->end;
458 if (GET_CODE (bb_end_insn) == JUMP_INSN)
460 if (any_condjump_p (bb_end_insn))
462 /* If the old fallthru is still next, nothing to do. */
463 if (RBI (bb)->next == e_fall->dest
464 || (!RBI (bb)->next
465 && e_fall->dest == EXIT_BLOCK_PTR))
466 continue;
468 /* The degenerated case of conditional jump jumping to the next
469 instruction can happen on target having jumps with side
470 effects.
472 Create temporarily the duplicated edge representing branch.
473 It will get unidentified by force_nonfallthru_and_redirect
474 that would otherwise get confused by fallthru edge not pointing
475 to the next basic block. */
476 if (!e_taken)
478 rtx note;
479 edge e_fake;
481 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
483 if (!redirect_jump (bb->end, block_label (bb), 0))
484 abort ();
485 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
486 if (note)
/* Split the former fallthru's profile between the fake branch edge
   and the remaining fallthru according to the branch probability.  */
488 int prob = INTVAL (XEXP (note, 0));
490 e_fake->probability = prob;
491 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
492 e_fall->probability -= e_fall->probability;
493 e_fall->count -= e_fake->count;
494 if (e_fall->probability < 0)
495 e_fall->probability = 0;
496 if (e_fall->count < 0)
497 e_fall->count = 0;
500 /* There is one special case: if *neither* block is next,
501 such as happens at the very end of a function, then we'll
502 need to add a new unconditional jump. Choose the taken
503 edge based on known or assumed probability. */
504 else if (RBI (bb)->next != e_taken->dest)
506 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* Invert only if the branch is predicted not-taken, so the likelier
   path becomes the fallthru.  */
508 if (note
509 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
510 && invert_jump (bb_end_insn,
511 label_for_bb (e_fall->dest), 0))
513 e_fall->flags &= ~EDGE_FALLTHRU;
514 e_taken->flags |= EDGE_FALLTHRU;
515 update_br_prob_note (bb);
516 e = e_fall, e_fall = e_taken, e_taken = e;
520 /* Otherwise we can try to invert the jump. This will
521 basically never fail, however, keep up the pretense. */
522 else if (invert_jump (bb_end_insn,
523 label_for_bb (e_fall->dest), 0))
525 e_fall->flags &= ~EDGE_FALLTHRU;
526 e_taken->flags |= EDGE_FALLTHRU;
527 update_br_prob_note (bb);
528 continue;
531 else if (returnjump_p (bb_end_insn))
532 continue;
533 else
535 /* Otherwise we have some switch or computed jump. In the
536 99% case, there should not have been a fallthru edge. */
537 if (! e_fall)
538 continue;
540 #ifdef CASE_DROPS_THROUGH
541 /* Except for VAX. Since we didn't have predication for the
542 tablejump, the fallthru block should not have moved. */
543 if (RBI (bb)->next == e_fall->dest)
544 continue;
545 bb_end_insn = skip_insns_after_block (bb);
546 #else
547 abort ();
548 #endif
551 else
553 /* No fallthru implies a noreturn function with EH edges, or
554 something similarly bizarre. In any case, we don't need to
555 do anything. */
556 if (! e_fall)
557 continue;
559 /* If the fallthru block is still next, nothing to do. */
560 if (RBI (bb)->next == e_fall->dest)
561 continue;
563 /* A fallthru to exit block. */
564 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
565 continue;
568 /* We got here if we need to add a new jump insn. */
569 nb = force_nonfallthru (e_fall);
570 if (nb)
/* Splice the new compensation block into the reorder chain right
   after BB so phase 3 numbers it correctly.  */
572 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
573 RBI (nb)->visited = 1;
574 RBI (nb)->next = RBI (bb)->next;
575 RBI (bb)->next = nb;
576 /* Don't process this new block. */
577 bb = nb;
581 /* Put basic_block_info in the new order. */
583 if (rtl_dump_file)
585 fprintf (rtl_dump_file, "Reordered sequence:\n");
586 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
588 fprintf (rtl_dump_file, " %i ", index);
589 if (RBI (bb)->original)
590 fprintf (rtl_dump_file, "duplicate of %i ",
591 RBI (bb)->original->index);
592 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
593 fprintf (rtl_dump_file, "compensation ");
594 else
595 fprintf (rtl_dump_file, "bb %i ", bb->index);
596 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Renumber blocks and rebuild the prev_bb/next_bb doubly linked list
   to follow the reorder chain.  */
600 prev_bb = ENTRY_BLOCK_PTR;
601 bb = ENTRY_BLOCK_PTR->next_bb;
602 index = 0;
604 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
606 bb->index = index;
607 BASIC_BLOCK (index) = bb;
609 bb->prev_bb = prev_bb;
610 prev_bb->next_bb = bb;
612 prev_bb->next_bb = EXIT_BLOCK_PTR;
613 EXIT_BLOCK_PTR->prev_bb = prev_bb;
616 /* Perform sanity checks on the insn chain.
617 1. Check that next/prev pointers are consistent in both the forward and
618 reverse direction.
619 2. Count insns in chain, going both directions, and check if equal.
620 3. Check that get_last_insn () returns the actual end of chain. */
622 void
623 verify_insn_chain ()
625 rtx x, prevx, nextx;
626 int insn_cnt1, insn_cnt2;
628 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
629 x != 0;
630 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
631 if (PREV_INSN (x) != prevx)
632 abort ();
634 if (prevx != get_last_insn ())
635 abort ();
637 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
638 x != 0;
639 nextx = x, insn_cnt2++, x = PREV_INSN (x))
640 if (NEXT_INSN (x) != nextx)
641 abort ();
643 if (insn_cnt1 != insn_cnt2)
644 abort ();
647 /* Remove any unconditional jumps and forwarder block creating fallthru
648 edges instead. During BB reordering, fallthru edges are not required
649 to target next basic block in the linear CFG layout, so the unconditional
650 jumps are not needed. If LOOPS is not null, also update loop structure &
651 dominators. */
653 static void
654 cleanup_unconditional_jumps (loops)
655 struct loops *loops;
657 basic_block bb;
659 FOR_EACH_BB (bb)
661 if (!bb->succ)
662 continue;
663 if (bb->succ->flags & EDGE_FALLTHRU)
664 continue;
/* Only blocks with a single, non-fallthru successor are candidates.  */
665 if (!bb->succ->succ_next)
667 rtx insn;
/* Case 1: a label-less forwarder block — delete the block entirely and
   retarget its predecessor edge.  */
668 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
669 && bb->prev_bb != ENTRY_BLOCK_PTR)
671 basic_block prev = bb->prev_bb;
673 if (rtl_dump_file)
674 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
675 bb->index);
677 if (loops)
679 /* bb cannot be loop header, as it only has one entry
680 edge. It could be a loop latch. */
681 if (bb->loop_father->header == bb)
682 abort ();
684 if (bb->loop_father->latch == bb)
685 bb->loop_father->latch = bb->pred->src;
/* BB's dominance role transfers to its single predecessor.  */
687 if (get_immediate_dominator
688 (loops->cfg.dom, bb->succ->dest) == bb)
689 set_immediate_dominator
690 (loops->cfg.dom, bb->succ->dest, bb->pred->src);
692 remove_bb_from_loops (bb);
693 delete_from_dominance_info (loops->cfg.dom, bb);
696 redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
697 flow_delete_block (bb);
/* Resume iteration from the previous block, since BB is gone.  */
698 bb = prev;
/* Case 2: the block ends in a simple unconditional jump — delete the
   jump and turn the edge into a fallthru.  */
700 else if (simplejump_p (bb->end))
702 rtx jump = bb->end;
704 if (rtl_dump_file)
705 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
706 INSN_UID (jump), bb->index);
707 delete_insn (jump);
708 bb->succ->flags |= EDGE_FALLTHRU;
710 else
711 continue;
/* Clean up any barriers left between the block and the next
   NOTE_INSN_BASIC_BLOCK.  */
713 insn = NEXT_INSN (bb->end);
714 while (insn
715 && (GET_CODE (insn) != NOTE
716 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
718 rtx next = NEXT_INSN (insn);
720 if (GET_CODE (insn) == BARRIER)
721 delete_barrier (insn);
723 insn = next;
729 /* The block falling through to exit must be the last one in the
730 reordered chain. Ensure that this condition is met. */
731 static void
732 fixup_fallthru_exit_predecessor ()
734 edge e;
735 basic_block bb = NULL;
737 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
738 if (e->flags & EDGE_FALLTHRU)
739 bb = e->src;
741 if (bb && RBI (bb)->next)
743 basic_block c = ENTRY_BLOCK_PTR->next_bb;
745 while (RBI (c)->next != bb)
746 c = RBI (c)->next;
748 RBI (c)->next = RBI (bb)->next;
749 while (RBI (c)->next)
750 c = RBI (c)->next;
752 RBI (c)->next = bb;
753 RBI (bb)->next = NULL;
757 /* Return true in case it is possible to duplicate the basic block BB. */
759 bool
760 cfg_layout_can_duplicate_bb_p (bb)
761 basic_block bb;
763 edge s;
765 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
766 return false;
768 /* Duplicating fallthru block to exit would require adding a jump
769 and splitting the real last BB. */
770 for (s = bb->succ; s; s = s->succ_next)
771 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
772 return false;
774 /* Do not attempt to duplicate tablejumps, as we need to unshare
775 the dispatch table. This is difficult to do, as the instructions
776 computing jump destination may be hoisted outside the basic block. */
777 if (tablejump_p (bb->end, NULL, NULL))
778 return false;
780 /* Do not duplicate blocks containing insns that can't be copied. */
781 if (targetm.cannot_copy_insn_p)
783 rtx insn = bb->head;
784 while (1)
786 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
787 return false;
788 if (insn == bb->end)
789 break;
790 insn = NEXT_INSN (insn);
794 return true;
/* Copy the insns FROM..TO (inclusive) to the end of the insn chain and
   return the first insn of the copy.  Dispatch tables, labels and most
   bookkeeping notes are deliberately not duplicated.  */
797 static rtx
798 duplicate_insn_chain (from, to)
799 rtx from, to;
801 rtx insn, last;
803 /* Avoid updating of boundaries of previous basic block. The
804 note will get removed from insn stream in fixup. */
805 last = emit_note (NULL, NOTE_INSN_DELETED);
807 /* Create copy at the end of INSN chain. The chain will
808 be reordered later. */
809 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
811 switch (GET_CODE (insn))
813 case INSN:
814 case CALL_INSN:
815 case JUMP_INSN:
816 /* Avoid copying of dispatch tables. We never duplicate
817 tablejumps, so this can hit only in case the table got
818 moved far from original jump. */
819 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
820 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
821 break;
822 emit_copy_of_insn_after (insn, get_last_insn ());
823 break;
/* Labels are never duplicated — the copy shares the original's.  */
825 case CODE_LABEL:
826 break;
828 case BARRIER:
829 emit_barrier ();
830 break;
832 case NOTE:
833 switch (NOTE_LINE_NUMBER (insn))
835 /* In case prologue is empty and function contain label
836 in first BB, we may want to copy the block. */
837 case NOTE_INSN_PROLOGUE_END:
839 case NOTE_INSN_LOOP_VTOP:
840 case NOTE_INSN_LOOP_CONT:
841 case NOTE_INSN_LOOP_BEG:
842 case NOTE_INSN_LOOP_END:
843 /* Strip down the loop notes - we don't really want to keep
844 them consistent in loop copies. */
845 case NOTE_INSN_DELETED:
846 case NOTE_INSN_DELETED_LABEL:
847 /* No problem to strip these. */
848 case NOTE_INSN_EPILOGUE_BEG:
849 case NOTE_INSN_FUNCTION_END:
850 /* Debug code expect these notes to exist just once.
851 Keep them in the master copy.
852 ??? It probably makes more sense to duplicate them for each
853 epilogue copy. */
854 case NOTE_INSN_FUNCTION_BEG:
855 /* There is always just single entry to function. */
856 case NOTE_INSN_BASIC_BLOCK:
857 break;
859 /* There is no purpose to duplicate prologue. */
860 case NOTE_INSN_BLOCK_BEG:
861 case NOTE_INSN_BLOCK_END:
862 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
863 reordering is in the progress. */
864 case NOTE_INSN_EH_REGION_BEG:
865 case NOTE_INSN_EH_REGION_END:
866 /* Should never exist at BB duplication time. */
867 abort ();
868 break;
869 case NOTE_INSN_REPEATED_LINE_NUMBER:
870 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
871 break;
873 default:
/* Positive note numbers are source line numbers; copy those.  */
874 if (NOTE_LINE_NUMBER (insn) < 0)
875 abort ();
876 /* It is possible that no_line_number is set and the note
877 won't be emitted. */
878 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
880 break;
881 default:
882 abort ();
/* The copy starts right after the placeholder note, which is then
   discarded.  */
885 insn = NEXT_INSN (last);
886 delete_insn (last);
887 return insn;
890 /* Redirect Edge to DEST. */
/* Returns true on success.  While in cfglayout mode, fallthru edges
   need not point at the physically next block, so redirection may be a
   pure CFG update with no jump emitted.  */
891 bool
892 cfg_layout_redirect_edge (e, dest)
893 edge e;
894 basic_block dest;
896 basic_block src = e->src;
897 basic_block old_next_bb = src->next_bb;
898 bool ret;
900 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
901 in the case the basic block appears to be in sequence. Avoid this
902 transformation. */
904 src->next_bb = NULL;
905 if (e->flags & EDGE_FALLTHRU)
907 /* In case we are redirecting fallthru edge to the branch edge
908 of conditional jump, remove it. */
909 if (src->succ->succ_next
910 && !src->succ->succ_next->succ_next)
/* S is the "other" (branch) outgoing edge of the two.  */
912 edge s = e->succ_next ? e->succ_next : src->succ;
913 if (s->dest == dest
914 && any_condjump_p (src->end)
915 && onlyjump_p (src->end))
916 delete_insn (src->end);
918 redirect_edge_succ_nodup (e, dest);
920 ret = true;
922 else
923 ret = redirect_edge_and_branch (e, dest);
925 /* We don't want simplejumps in the insn stream during cfglayout. */
926 if (simplejump_p (src->end))
/* NOTE(review): NEXT_INSN (src->end) is read after delete_insn —
   presumably delete_insn leaves the insn's links intact (turns it into
   a deleted note) so this finds the following barrier; confirm against
   delete_insn's contract.  */
928 delete_insn (src->end);
929 delete_barrier (NEXT_INSN (src->end));
930 src->succ->flags |= EDGE_FALLTHRU;
932 src->next_bb = old_next_bb;
934 return ret;
937 /* Same as split_block but update cfg_layout structures. */
938 edge
939 cfg_layout_split_block (bb, insn)
940 basic_block bb;
941 rtx insn;
943 edge fallthru = split_block (bb, insn);
945 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
946 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
947 RBI (fallthru->src)->footer = NULL;
948 return fallthru;
951 /* Create a duplicate of the basic block BB and redirect edge E into it. */
/* Returns the new block.  Profile counts and frequencies are split
   between BB and the copy in proportion to E's count; the RBI
   original/copy links are recorded for the dump code.  */
953 basic_block
954 cfg_layout_duplicate_bb (bb, e)
955 basic_block bb;
956 edge e;
958 rtx insn;
959 edge s, n;
960 basic_block new_bb;
/* The copy inherits at most E's execution count, capped by BB's.  */
961 gcov_type new_count = e ? e->count : 0;
963 if (bb->count < new_count)
964 new_count = bb->count;
965 if (!bb->pred)
966 abort ();
967 #ifdef ENABLE_CHECKING
968 if (!cfg_layout_can_duplicate_bb_p (bb))
969 abort ();
970 #endif
/* Copy the block body and materialise it just before the exit block.  */
972 insn = duplicate_insn_chain (bb->head, bb->end);
973 new_bb = create_basic_block (insn,
974 insn ? get_last_insn () : NULL,
975 EXIT_BLOCK_PTR->prev_bb);
976 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
/* Duplicate the saved header/footer chains as well.  */
978 if (RBI (bb)->header)
980 insn = RBI (bb)->header;
981 while (NEXT_INSN (insn))
982 insn = NEXT_INSN (insn);
983 insn = duplicate_insn_chain (RBI (bb)->header, insn);
984 if (insn)
985 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
988 if (RBI (bb)->footer)
990 insn = RBI (bb)->footer;
991 while (NEXT_INSN (insn))
992 insn = NEXT_INSN (insn);
993 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
994 if (insn)
995 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
/* Copy liveness information when present.  */
998 if (bb->global_live_at_start)
1000 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1001 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1002 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1003 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1006 new_bb->loop_depth = bb->loop_depth;
1007 new_bb->flags = bb->flags;
1008 for (s = bb->succ; s; s = s->succ_next)
1010 /* Since we are creating edges from a new block to successors
1011 of another block (which therefore are known to be disjoint), there
1012 is no need to actually check for duplicated edges. */
1013 n = unchecked_make_edge (new_bb, s->dest, s->flags);
1014 n->probability = s->probability;
1015 if (new_count)
1016 /* Take care for overflows! */
1017 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
1018 else
1019 n->count = 0;
1020 s->count -= n->count;
1023 new_bb->count = new_count;
1024 bb->count -= new_count;
1026 if (e)
1028 new_bb->frequency = EDGE_FREQUENCY (e);
1029 bb->frequency -= EDGE_FREQUENCY (e);
/* Finally send E into the copy.  */
1031 cfg_layout_redirect_edge (e, new_bb);
/* Rounding above may have driven BB's profile slightly negative.  */
1034 if (bb->count < 0)
1035 bb->count = 0;
1036 if (bb->frequency < 0)
1037 bb->frequency = 0;
1039 RBI (new_bb)->original = bb;
1040 RBI (bb)->copy = new_bb;
1041 return new_bb;
1044 /* Main entry point to this module - initialize the datastructures for
1045 CFG layout changes. It keeps LOOPS up-to-date if not null. */
1047 void
1048 cfg_layout_initialize (loops)
1049 struct loops *loops;
1051 /* Our algorithm depends on fact that there are now dead jumptables
1052 around the code. */
1053 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
1055 cleanup_unconditional_jumps (loops);
1057 record_effective_endpoints ();
1060 /* Splits superblocks. */
1061 static void
1062 break_superblocks ()
1064 sbitmap superblocks;
1065 int i, need;
1067 superblocks = sbitmap_alloc (n_basic_blocks);
1068 sbitmap_zero (superblocks);
1070 need = 0;
1072 for (i = 0; i < n_basic_blocks; i++)
1073 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1075 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1076 SET_BIT (superblocks, i);
1077 need = 1;
1080 if (need)
1082 rebuild_jump_labels (get_insns ());
1083 find_many_sub_basic_blocks (superblocks);
1086 free (superblocks);
/* Finalize the layout changes: force the exit-fallthru block last,
   reorder the insn list according to the chain (adding compensation
   jumps where needed), release the per-block aux data, and break any
   superblocks back into basic blocks.  Sanity checks run when
   ENABLE_CHECKING is defined.  */

void
cfg_layout_finalize ()
{
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}