* config/xtensa/linux.h (TARGET_OS_CPP_BUILTINS): Remove definition of
[official-gcc.git] / gcc / cfglayout.c
blob9c5b85aec16144d3005db6e118739f34cbb5acd8
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
30 #include "output.h"
31 #include "function.h"
32 #include "obstack.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.
   Set by record_effective_endpoints; re-attached to the insn chain
   by fixup_reorder_chain.  */
static rtx function_footer;

/* Forward declarations for the static helpers defined below.  */
static rtx skip_insns_after_block PARAMS ((basic_block));
static void record_effective_endpoints PARAMS ((void));
static rtx label_for_bb PARAMS ((basic_block));
static void fixup_reorder_chain PARAMS ((void));

static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));

void verify_insn_chain PARAMS ((void));
static void cleanup_unconditional_jumps PARAMS ((struct loops *));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
static void break_superblocks PARAMS ((void));
59 static rtx
60 unlink_insn_chain (first, last)
61 rtx first;
62 rtx last;
64 rtx prevfirst = PREV_INSN (first);
65 rtx nextlast = NEXT_INSN (last);
67 PREV_INSN (first) = NULL;
68 NEXT_INSN (last) = NULL;
69 if (prevfirst)
70 NEXT_INSN (prevfirst) = nextlast;
71 if (nextlast)
72 PREV_INSN (nextlast) = prevfirst;
73 else
74 set_last_insn (prevfirst);
75 if (!prevfirst)
76 set_first_insn (nextlast);
77 return first;
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (bb)
     basic_block bb;
{
  rtx insn, last_insn, next_head, prev;

  /* Never walk into the following block's insns.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          /* Barriers belong to the preceding block.  */
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              /* End notes are claimed by this block.  */
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* Deleted markers are skipped but not claimed.  */
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          /* A label immediately followed by its jump table stays with
             the preceding tablejump; absorb both.  */
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      /* Anything else ends the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such case reorder the notes.  */
  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            /* Move the stray note past the claimed trailing insns.  */
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}
170 /* Locate or create a label for a given basic block. */
172 static rtx
173 label_for_bb (bb)
174 basic_block bb;
176 rtx label = bb->head;
178 if (GET_CODE (label) != CODE_LABEL)
180 if (rtl_dump_file)
181 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
183 label = block_label (bb);
186 return label;
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Inter-block
   insns are detached from the chain and stashed in the per-block
   header/footer fields; whatever trails the last block is saved in
   function_footer.  */

static void
record_effective_endpoints ()
{
  rtx next_insn = get_insns ();
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's recorded end and this
         block's head form this block's header.  */
      if (PREV_INSN (bb->head) && next_insn != bb->head)
        RBI (bb)->header = unlink_insn_chain (next_insn,
                                              PREV_INSN (bb->head));
      /* Trailing insns associated with this block form its footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
        RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  /* Anything left after the final block is the function footer.  */
  function_footer = next_insn;
  if (function_footer)
    function_footer = unlink_insn_chain (function_footer, get_last_insn ());
}
/* Record the enclosing lexical block in INSN_SCOPE for every active
   insn, consuming (deleting) the NOTE_INSN_BLOCK_BEG/END notes that
   delimit scopes in the insn stream.  Afterwards tag the block tree
   with depth numbers for change_scope.  */

void
scope_to_insns_initialize ()
{
  tree block = NULL;
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      /* Dispatch tables carry no useful scope information.  */
      if (active_insn_p (insn)
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
        INSN_SCOPE (insn) = block;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              /* Leaving the outermost scope: a FUNCTION_DECL parent
                 means there is no enclosing BLOCK.  */
              block = BLOCK_SUPERCONTEXT (block);
              if (block && TREE_CODE (block) == FUNCTION_DECL)
                block = 0;
              delete_insn (insn);
              break;
            default:
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}
257 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
258 found in the block tree. */
260 static void
261 set_block_levels (block, level)
262 tree block;
263 int level;
265 while (block)
267 BLOCK_NUMBER (block) = level;
268 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
269 block = BLOCK_CHAIN (block);
273 /* Return sope resulting from combination of S1 and S2. */
274 tree
275 choose_inner_scope (s1, s2)
276 tree s1, s2;
278 if (!s1)
279 return s2;
280 if (!s2)
281 return s1;
282 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
283 return s1;
284 return s2;
/* Emit lexical block notes needed to change scope from S1 to S2,
   inserting them before ORIG_INSN.  Finds the common ancestor of the
   two scopes via the depth numbers assigned by set_block_levels, then
   emits BLOCK_END notes closing S1 up to the ancestor and BLOCK_BEG
   notes opening the ancestor down to S2.  */

static void
change_scope (orig_insn, s1, s2)
     rtx orig_insn;
     tree s1, s2;
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk the deeper scope upward until both cursors meet at the
     common ancestor.  Both scopes must share one, else abort.  */
  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          /* Equal depth: step both.  */
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Note: each emitted BEG note becomes the new insertion
     point, so the notes end up in outermost-first order.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  Walks the
   active insns, emitting scope-transition notes wherever INSN_SCOPE
   changes, then closes all remaining scopes at the end.  */

void
scope_to_insns_finalize ()
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = INSN_SCOPE (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             INSN_SCOPE (XVECEXP (body, 0, i)));
        }
      /* Insns without a recorded scope do not force a transition.  */
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  Use a throwaway
     note as the anchor for closing the final scopes, then drop it.  */
  note = emit_note (NULL, NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
381 /* Given a reorder chain, rearrange the code to match. */
383 static void
384 fixup_reorder_chain ()
386 basic_block bb, prev_bb;
387 int index;
388 rtx insn = NULL;
390 /* First do the bulk reordering -- rechain the blocks without regard to
391 the needed changes to jumps and labels. */
393 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
394 bb != 0;
395 bb = RBI (bb)->next, index++)
397 if (RBI (bb)->header)
399 if (insn)
400 NEXT_INSN (insn) = RBI (bb)->header;
401 else
402 set_first_insn (RBI (bb)->header);
403 PREV_INSN (RBI (bb)->header) = insn;
404 insn = RBI (bb)->header;
405 while (NEXT_INSN (insn))
406 insn = NEXT_INSN (insn);
408 if (insn)
409 NEXT_INSN (insn) = bb->head;
410 else
411 set_first_insn (bb->head);
412 PREV_INSN (bb->head) = insn;
413 insn = bb->end;
414 if (RBI (bb)->footer)
416 NEXT_INSN (insn) = RBI (bb)->footer;
417 PREV_INSN (RBI (bb)->footer) = insn;
418 while (NEXT_INSN (insn))
419 insn = NEXT_INSN (insn);
423 if (index != n_basic_blocks)
424 abort ();
426 NEXT_INSN (insn) = function_footer;
427 if (function_footer)
428 PREV_INSN (function_footer) = insn;
430 while (NEXT_INSN (insn))
431 insn = NEXT_INSN (insn);
433 set_last_insn (insn);
434 #ifdef ENABLE_CHECKING
435 verify_insn_chain ();
436 #endif
438 /* Now add jumps and labels as needed to match the blocks new
439 outgoing edges. */
441 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
443 edge e_fall, e_taken, e;
444 rtx bb_end_insn;
445 basic_block nb;
447 if (bb->succ == NULL)
448 continue;
450 /* Find the old fallthru edge, and another non-EH edge for
451 a taken jump. */
452 e_taken = e_fall = NULL;
453 for (e = bb->succ; e ; e = e->succ_next)
454 if (e->flags & EDGE_FALLTHRU)
455 e_fall = e;
456 else if (! (e->flags & EDGE_EH))
457 e_taken = e;
459 bb_end_insn = bb->end;
460 if (GET_CODE (bb_end_insn) == JUMP_INSN)
462 if (any_condjump_p (bb_end_insn))
464 /* If the old fallthru is still next, nothing to do. */
465 if (RBI (bb)->next == e_fall->dest
466 || (!RBI (bb)->next
467 && e_fall->dest == EXIT_BLOCK_PTR))
468 continue;
470 /* The degenerated case of conditional jump jumping to the next
471 instruction can happen on target having jumps with side
472 effects.
474 Create temporarily the duplicated edge representing branch.
475 It will get unidentified by force_nonfallthru_and_redirect
476 that would otherwise get confused by fallthru edge not pointing
477 to the next basic block. */
478 if (!e_taken)
480 rtx note;
481 edge e_fake;
483 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
485 if (!redirect_jump (bb->end, block_label (bb), 0))
486 abort ();
487 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
488 if (note)
490 int prob = INTVAL (XEXP (note, 0));
492 e_fake->probability = prob;
493 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
494 e_fall->probability -= e_fall->probability;
495 e_fall->count -= e_fake->count;
496 if (e_fall->probability < 0)
497 e_fall->probability = 0;
498 if (e_fall->count < 0)
499 e_fall->count = 0;
502 /* There is one special case: if *neither* block is next,
503 such as happens at the very end of a function, then we'll
504 need to add a new unconditional jump. Choose the taken
505 edge based on known or assumed probability. */
506 else if (RBI (bb)->next != e_taken->dest)
508 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
510 if (note
511 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
512 && invert_jump (bb_end_insn,
513 label_for_bb (e_fall->dest), 0))
515 e_fall->flags &= ~EDGE_FALLTHRU;
516 e_taken->flags |= EDGE_FALLTHRU;
517 update_br_prob_note (bb);
518 e = e_fall, e_fall = e_taken, e_taken = e;
522 /* Otherwise we can try to invert the jump. This will
523 basically never fail, however, keep up the pretense. */
524 else if (invert_jump (bb_end_insn,
525 label_for_bb (e_fall->dest), 0))
527 e_fall->flags &= ~EDGE_FALLTHRU;
528 e_taken->flags |= EDGE_FALLTHRU;
529 update_br_prob_note (bb);
530 continue;
533 else if (returnjump_p (bb_end_insn))
534 continue;
535 else
537 /* Otherwise we have some switch or computed jump. In the
538 99% case, there should not have been a fallthru edge. */
539 if (! e_fall)
540 continue;
542 #ifdef CASE_DROPS_THROUGH
543 /* Except for VAX. Since we didn't have predication for the
544 tablejump, the fallthru block should not have moved. */
545 if (RBI (bb)->next == e_fall->dest)
546 continue;
547 bb_end_insn = skip_insns_after_block (bb);
548 #else
549 abort ();
550 #endif
553 else
555 /* No fallthru implies a noreturn function with EH edges, or
556 something similarly bizarre. In any case, we don't need to
557 do anything. */
558 if (! e_fall)
559 continue;
561 /* If the fallthru block is still next, nothing to do. */
562 if (RBI (bb)->next == e_fall->dest)
563 continue;
565 /* A fallthru to exit block. */
566 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
567 continue;
570 /* We got here if we need to add a new jump insn. */
571 nb = force_nonfallthru (e_fall);
572 if (nb)
574 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
575 RBI (nb)->visited = 1;
576 RBI (nb)->next = RBI (bb)->next;
577 RBI (bb)->next = nb;
578 /* Don't process this new block. */
579 bb = nb;
583 /* Put basic_block_info in the new order. */
585 if (rtl_dump_file)
587 fprintf (rtl_dump_file, "Reordered sequence:\n");
588 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
590 fprintf (rtl_dump_file, " %i ", index);
591 if (RBI (bb)->original)
592 fprintf (rtl_dump_file, "duplicate of %i ",
593 RBI (bb)->original->index);
594 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
595 fprintf (rtl_dump_file, "compensation ");
596 else
597 fprintf (rtl_dump_file, "bb %i ", bb->index);
598 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
602 prev_bb = ENTRY_BLOCK_PTR;
603 bb = ENTRY_BLOCK_PTR->next_bb;
604 index = 0;
606 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
608 bb->index = index;
609 BASIC_BLOCK (index) = bb;
611 bb->prev_bb = prev_bb;
612 prev_bb->next_bb = bb;
614 prev_bb->next_bb = EXIT_BLOCK_PTR;
615 EXIT_BLOCK_PTR->prev_bb = prev_bb;
618 /* Perform sanity checks on the insn chain.
619 1. Check that next/prev pointers are consistent in both the forward and
620 reverse direction.
621 2. Count insns in chain, going both directions, and check if equal.
622 3. Check that get_last_insn () returns the actual end of chain. */
624 void
625 verify_insn_chain ()
627 rtx x, prevx, nextx;
628 int insn_cnt1, insn_cnt2;
630 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
631 x != 0;
632 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
633 if (PREV_INSN (x) != prevx)
634 abort ();
636 if (prevx != get_last_insn ())
637 abort ();
639 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
640 x != 0;
641 nextx = x, insn_cnt2++, x = PREV_INSN (x))
642 if (NEXT_INSN (x) != nextx)
643 abort ();
645 if (insn_cnt1 != insn_cnt2)
646 abort ();
/* Remove any unconditional jumps and forwarder blocks, creating fallthru
   edges instead.  During BB reordering, fallthru edges are not required
   to target the next basic block in the linear CFG layout, so the
   unconditional jumps are not needed.  If LOOPS is not null, also update
   loop structure & dominators.  */

static void
cleanup_unconditional_jumps (loops)
     struct loops *loops;
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      if (!bb->succ)
        continue;
      if (bb->succ->flags & EDGE_FALLTHRU)
        continue;
      /* Only blocks with a single (non-fallthru) successor qualify.  */
      if (!bb->succ->succ_next)
        {
          rtx insn;
          if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
              && bb->prev_bb != ENTRY_BLOCK_PTR)
            {
              /* Remember the predecessor so the FOR_EACH_BB walk resumes
                 correctly after this block is deleted.  */
              basic_block prev = bb->prev_bb;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
                         bb->index);

              if (loops)
                {
                  /* bb cannot be loop header, as it only has one entry
                     edge.  It could be a loop latch.  */
                  if (bb->loop_father->header == bb)
                    abort ();

                  if (bb->loop_father->latch == bb)
                    bb->loop_father->latch = bb->pred->src;

                  if (get_immediate_dominator
                      (loops->cfg.dom, bb->succ->dest) == bb)
                    set_immediate_dominator
                      (loops->cfg.dom, bb->succ->dest, bb->pred->src);

                  remove_bb_from_loops (bb);
                  delete_from_dominance_info (loops->cfg.dom, bb);
                }

              /* Route the lone predecessor straight to the destination
                 and drop the forwarder.  */
              redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
              flow_delete_block (bb);
              bb = prev;
            }
          else if (simplejump_p (bb->end))
            {
              rtx jump = bb->end;

              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
                         INSN_UID (jump), bb->index);
              delete_insn (jump);
              bb->succ->flags |= EDGE_FALLTHRU;
            }
          else
            continue;

          /* Sweep any barriers left between this block and the next
             block's NOTE_INSN_BASIC_BLOCK.  */
          insn = NEXT_INSN (bb->end);
          while (insn
                 && (GET_CODE (insn) != NOTE
                     || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
            {
              rtx next = NEXT_INSN (insn);

              if (GET_CODE (insn) == BARRIER)
                delete_barrier (insn);

              insn = next;
            }
        }
    }
}
/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor ()
{
  edge e;
  basic_block bb = NULL;

  /* Find the predecessor of the exit block that falls through, if any.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && RBI (bb)->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* Unlink BB from its current position in the reorder chain...  */
      while (RBI (c)->next != bb)
        c = RBI (c)->next;

      RBI (c)->next = RBI (bb)->next;
      /* ...then walk to the chain's tail and append BB there.  */
      while (RBI (c)->next)
        c = RBI (c)->next;

      RBI (c)->next = bb;
      RBI (bb)->next = NULL;
    }
}
759 /* Return true in case it is possible to duplicate the basic block BB. */
761 bool
762 cfg_layout_can_duplicate_bb_p (bb)
763 basic_block bb;
765 edge s;
767 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
768 return false;
770 /* Duplicating fallthru block to exit would require adding a jump
771 and splitting the real last BB. */
772 for (s = bb->succ; s; s = s->succ_next)
773 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
774 return false;
776 /* Do not attempt to duplicate tablejumps, as we need to unshare
777 the dispatch table. This is difficult to do, as the instructions
778 computing jump destination may be hoisted outside the basic block. */
779 if (tablejump_p (bb->end, NULL, NULL))
780 return false;
782 /* Do not duplicate blocks containing insns that can't be copied. */
783 if (targetm.cannot_copy_insn_p)
785 rtx insn = bb->head;
786 while (1)
788 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
789 return false;
790 if (insn == bb->end)
791 break;
792 insn = NEXT_INSN (insn);
796 return true;
/* Copy the insns in the inclusive range FROM .. TO to the end of the
   insn chain and return the first copied insn.  Labels, dispatch
   tables, and most notes are deliberately not copied.  */

static rtx
duplicate_insn_chain (from, to)
     rtx from, to;
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NULL, NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          /* Labels are never copied; the duplicate gets its own.  */
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case prologue is empty and function contain label
                 in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expect these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              /* All of the above fall through: skipped, not copied.  */
              break;

              /* There is no purpose to duplicate prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
                 reordering is in the progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
              break;

            default:
              /* Negative note numbers other than the cases above are
                 unexpected here.  */
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
            }
          break;
        default:
          abort ();
        }
    }
  /* The first real copy follows the placeholder note; drop the note.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Redirect edge E to DEST.  Return true on success.  */
bool
cfg_layout_redirect_edge (e, dest)
     edge e;
     basic_block dest;
{
  basic_block src = e->src;
  basic_block old_next_bb = src->next_bb;
  bool ret;

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  src->next_bb = NULL;
  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (src->end) == JUMP_INSN
          && JUMP_LABEL (src->end) == e->dest->head)
        {
          if (!redirect_jump (src->end, block_label (dest), 0))
            abort ();
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (src->succ->succ_next
          && !src->succ->succ_next->succ_next)
        {
          /* S is the other (branch) edge of the two-successor block.  */
          edge s = e->succ_next ? e->succ_next : src->succ;
          if (s->dest == dest
              && any_condjump_p (src->end)
              && onlyjump_p (src->end))
            delete_insn (src->end);
        }
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = redirect_edge_and_branch (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (src->end))
    {
      /* NOTE(review): delete_insn runs before delete_barrier reads
         NEXT_INSN (src->end) -- this relies on the deleted insn keeping
         its chain pointers; confirm against delete_insn's contract.  */
      delete_insn (src->end);
      delete_barrier (NEXT_INSN (src->end));
      src->succ->flags |= EDGE_FALLTHRU;
    }
  src->next_bb = old_next_bb;

  return ret;
}
946 /* Same as split_block but update cfg_layout structures. */
947 edge
948 cfg_layout_split_block (bb, insn)
949 basic_block bb;
950 rtx insn;
952 edge fallthru = split_block (bb, insn);
954 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
955 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
956 RBI (fallthru->src)->footer = NULL;
957 return fallthru;
/* Create a duplicate of the basic block BB and redirect edge E into it.
   Profile counts and frequencies are split between the original and
   the copy in proportion to E's count.  Returns the new block.  */

basic_block
cfg_layout_duplicate_bb (bb, e)
     basic_block bb;
     edge e;
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  /* The copy can never receive more count than BB itself has.  */
  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  /* Duplicate the body and wrap it in a fresh block placed just
     before the exit block.  */
  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);
  alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));

  /* Copy the stashed header insns, if any.  */
  if (RBI (bb)->header)
    {
      insn = RBI (bb)->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->header, insn);
      if (insn)
        RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Likewise the footer insns.  */
  if (RBI (bb)->footer)
    {
      insn = RBI (bb)->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (RBI (bb)->footer, insn);
      if (insn)
        RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  /* Copy liveness information when available.  */
  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
         of another block (which therefore are known to be disjoint), there
         is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (new_count)
        /* Take care for overflows!  */
        n->count = s->count * (new_count * 10000 / bb->count) / 10000;
      else
        n->count = 0;
      s->count -= n->count;
    }

  new_bb->count = new_count;
  bb->count -= new_count;

  if (e)
    {
      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      cfg_layout_redirect_edge (e, new_bb);
    }

  /* Rounding in the proportional split may leave tiny negatives;
     clamp them away.  */
  if (bb->count < 0)
    bb->count = 0;
  if (bb->frequency < 0)
    bb->frequency = 0;

  /* Cross-link original and copy for the reorder bookkeeping.  */
  RBI (new_bb)->original = bb;
  RBI (bb)->copy = new_bb;
  return new_bb;
}
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   Our algorithm depends on the fact that there are no dead jumptables
   left around in the code.  */

void
cfg_layout_initialize (loops)
     struct loops *loops;
{
  /* Attach a reorder_block_def (header/footer/next/...) to every block.  */
  alloc_aux_for_blocks (sizeof (struct reorder_block_def));

  cleanup_unconditional_jumps (loops);

  record_effective_endpoints ();
}
1069 /* Splits superblocks. */
1070 static void
1071 break_superblocks ()
1073 sbitmap superblocks;
1074 int i, need;
1076 superblocks = sbitmap_alloc (n_basic_blocks);
1077 sbitmap_zero (superblocks);
1079 need = 0;
1081 for (i = 0; i < n_basic_blocks; i++)
1082 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1084 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1085 SET_BIT (superblocks, i);
1086 need = 1;
1089 if (need)
1091 rebuild_jump_labels (get_insns ());
1092 find_many_sub_basic_blocks (superblocks);
1095 free (superblocks);
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  Counterpart of
   cfg_layout_initialize; frees the per-block aux data.  */

void
cfg_layout_finalize ()
{
  /* The exit-fallthru block must come last before the chain is
     committed to the insn stream.  */
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_aux_for_blocks ();

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}