1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /* References:
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
26 TODO:
28 (1) Consider:
30 if (p) goto A; // predict taken
31 foo ();
33 if (q) goto B; // predict taken
34 bar ();
36 baz ();
37 return;
39 We'll currently reorder this as
41 if (!p) goto C;
43 if (!q) goto D;
45 baz ();
46 return;
48 bar ();
49 goto B;
51 foo ();
52 goto A;
54 A better ordering is
56 if (!p) goto C;
57 if (!q) goto D;
59 baz ();
60 return;
62 foo ();
63 if (q) goto B;
65 bar ();
66 goto B;
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
73 long branches.
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
83 #include "config.h"
84 #include "system.h"
85 #include "tree.h"
86 #include "rtl.h"
87 #include "tm_p.h"
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
91 #include "regs.h"
92 #include "flags.h"
93 #include "output.h"
94 #include "function.h"
95 #include "except.h"
96 #include "toplev.h"
97 #include "recog.h"
98 #include "insn-flags.h"
99 #include "expr.h"
100 #include "obstack.h"
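/* Provide a default so that ! HAVE_epilogue can be tested below even on
   targets that do not define an epilogue pattern.  */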
103 #ifndef HAVE_epilogue
104 #define HAVE_epilogue 0
105 #endif
108 /* The contents of the current function definition are allocated
109 in this obstack, and all are freed at the end of the function.
110 For top-level functions, this is temporary_obstack.
111 Separate obstacks are made for nested functions. */
113 extern struct obstack flow_obstack;
116 /* Structure to hold information about lexical scopes. */
117 typedef struct scope_def
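  /* Nesting level of this scope; 0 for the root of a scope tree.  Set to -1
     in relate_bbs_with_scopes for empty scopes that span no basic blocks.  */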
119 int level;
121 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 rtx note_beg;
124 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 rtx note_end;
127 /* The bb containing note_beg (if any). */
128 basic_block bb_beg;
130 /* The bb containing note_end (if any). */
131 basic_block bb_end;
133 /* List of basic blocks contained within this scope. */
134 basic_block *bbs;
136 /* Number of blocks contained within this scope. */
137 int num_bbs;
139 /* The outer scope or NULL if outermost scope. */
140 struct scope_def *outer;
142 /* The first inner scope or NULL if innermost scope. */
143 struct scope_def *inner;
145 /* The last inner scope or NULL if innermost scope. */
146 struct scope_def *inner_last;
148 /* Link to the next (sibling) scope. */
149 struct scope_def *next;
150 } *scope;
153 /* Structure to hold information about the scope forest. */
154 typedef struct
156 /* Number of trees in forest. */
157 int num_trees;
159 /* List of tree roots. */
160 scope *trees;
161 } scope_forest_info;
163 /* Structure to hold information about the blocks during reordering. */
164 typedef struct reorder_block_def
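  /* First insn in the block's effective range; may precede bb->head
     (see record_effective_endpoints).  */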
166 rtx eff_head;
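  /* Last insn in the block's effective range; may follow bb->end
     (barriers and the like).  */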
167 rtx eff_end;
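  /* Innermost lexical scope known to contain this block, or NULL.  */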
168 scope scope;
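  /* Next basic block in the computed reorder chain.  */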
169 basic_block next;
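  /* Position of this block in the reorder chain.  */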
170 int index;
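  /* Nonzero once the block has been placed in the chain.  */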
171 int visited;
172 } *reorder_block_def;
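/* Access the reorder_block_def information hanging off BB->aux.  */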
174 #define RBI(BB) ((reorder_block_def) (BB)->aux)
177 /* Local function prototypes. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
200 void verify_insn_chain PARAMS ((void));
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
206 static rtx
207 skip_insns_after_block (bb)
208 basic_block bb;
210 rtx insn, last_insn, next_head;
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
216 for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
218 if (insn == next_head)
219 break;
221 switch (GET_CODE (insn))
223 case BARRIER:
224 continue;
226 case NOTE:
227 switch (NOTE_LINE_NUMBER (insn))
229 case NOTE_INSN_LOOP_END:
230 case NOTE_INSN_BLOCK_END:
231 case NOTE_INSN_DELETED:
232 case NOTE_INSN_DELETED_LABEL:
233 continue;
235 default:
236 break;
238 break;
240 case CODE_LABEL:
241 if (NEXT_INSN (insn)
242 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
243 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
244 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
246 insn = NEXT_INSN (insn);
247 continue;
249 break;
251 default:
252 break;
255 break;
258 return last_insn;
262 /* Locate the effective beginning and end of the insn chain for each
263 block, as defined by skip_insns_after_block above. */
265 static void
266 record_effective_endpoints ()
268 rtx next_insn = get_insns ();
269 int i;
271 for (i = 0; i < n_basic_blocks; ++i)
273 basic_block bb = BASIC_BLOCK (i);
274 rtx end;
276 RBI (bb)->eff_head = next_insn;
277 end = skip_insns_after_block (bb);
278 RBI (bb)->eff_end = end;
279 next_insn = NEXT_INSN (end);
 284 /* Compute an ordering for the basic blocks of the current function.  Record the
 285 ordering in RBI()->index and chain the blocks through RBI()->next. */
287 static void
288 make_reorder_chain ()
290 basic_block last_block = NULL;
291 basic_block prev = NULL;
292 int nbb_m1 = n_basic_blocks - 1;
 294 /* If we don't have an epilogue in RTL, we must fall through to the exit.
 295 Force the last block to be at the end. */
296 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
297 end of the function for stack unwinding purposes. */
298 if (! HAVE_epilogue)
300 last_block = BASIC_BLOCK (nbb_m1);
301 RBI (last_block)->visited = 1;
302 nbb_m1 -= 1;
305 /* Loop until we've placed every block. */
308 int i;
309 basic_block next = NULL;
311 /* Find the next unplaced block. */
312 /* ??? Get rid of this loop, and track which blocks are not yet
313 placed more directly, so as to avoid the O(N^2) worst case.
314 Perhaps keep a doubly-linked list of all to-be-placed blocks;
315 remove from the list as we place. The head of that list is
316 what we're looking for here. */
318 for (i = 0; i <= nbb_m1; ++i)
320 basic_block bb = BASIC_BLOCK (i);
321 if (! RBI (bb)->visited)
323 next = bb;
324 break;
327 if (! next)
328 abort ();
330 prev = make_reorder_chain_1 (next, prev);
332 while (RBI (prev)->index < nbb_m1);
334 /* Terminate the chain. */
335 if (! HAVE_epilogue)
337 RBI (prev)->next = last_block;
338 RBI (last_block)->index = RBI (prev)->index + 1;
339 prev = last_block;
341 RBI (prev)->next = NULL;
344 /* A helper function for make_reorder_chain.
346 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
347 These are assumed to be the error condition and we wish to cluster
348 all of them at the very end of the function for the benefit of cache
349 locality for the rest of the function.
351 ??? We could do slightly better by noticing earlier that some subgraph
352 has all paths leading to noreturn functions, but for there to be more
353 than one block in such a subgraph is rare. */
355 static basic_block
356 make_reorder_chain_1 (bb, prev)
357 basic_block bb;
358 basic_block prev;
360 edge e;
361 basic_block next;
362 rtx note;
364 /* Mark this block visited. */
365 if (prev)
367 int new_index;
369 restart:
370 RBI (prev)->next = bb;
371 new_index = RBI (prev)->index + 1;
372 RBI (bb)->index = new_index;
374 if (rtl_dump_file && prev->index + 1 != bb->index)
375 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
376 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
378 else
379 RBI (bb)->index = 0;
380 RBI (bb)->visited = 1;
381 prev = bb;
383 if (bb->succ == NULL)
384 return prev;
386 /* Find the most probable block. */
388 next = NULL;
389 if (any_condjump_p (bb->end)
390 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
392 int taken, probability;
393 edge e_taken, e_fall;
395 probability = INTVAL (XEXP (note, 0));
396 taken = probability > REG_BR_PROB_BASE / 2;
398 /* Find the normal taken edge and the normal fallthru edge.
399 Note that there may in fact be other edges due to
400 asynchronous_exceptions.
402 Note, conditional jumps with other side effects may not
403 be fully optimized. In this case it is possible for
404 the conditional jump to branch to the same location as
405 the fallthru path.
407 We should probably work to improve optimization of that
408 case; however, it seems silly not to also deal with such
409 problems here if they happen to occur. */
411 e_taken = e_fall = NULL;
412 for (e = bb->succ; e ; e = e->succ_next)
414 if (e->flags & EDGE_FALLTHRU)
415 e_fall = e;
416 if (! (e->flags & EDGE_EH))
417 e_taken = e;
420 next = (taken ? e_taken : e_fall)->dest;
423 /* In the absence of a prediction, disturb things as little as possible
424 by selecting the old "next" block from the list of successors. If
425 there had been a fallthru edge, that will be the one. */
426 if (! next)
428 for (e = bb->succ; e ; e = e->succ_next)
429 if (e->dest->index == bb->index + 1)
431 if ((e->flags & EDGE_FALLTHRU)
432 || (e->dest->succ
433 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
434 next = e->dest;
435 break;
439 /* Make sure we didn't select a silly next block. */
440 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
441 next = NULL;
443 /* Recurse on the successors. Unroll the last call, as the normal
444 case is exactly one or two edges, and we can tail recurse. */
445 for (e = bb->succ; e; e = e->succ_next)
446 if (e->dest != EXIT_BLOCK_PTR
447 && ! RBI (e->dest)->visited
448 && e->dest->succ
449 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
451 if (next)
453 prev = make_reorder_chain_1 (next, prev);
454 next = RBI (e->dest)->visited ? NULL : e->dest;
456 else
457 next = e->dest;
459 if (next)
461 bb = next;
462 goto restart;
465 return prev;
469 /* Locate or create a label for a given basic block. */
471 static rtx
472 label_for_bb (bb)
473 basic_block bb;
475 rtx label = bb->head;
477 if (GET_CODE (label) != CODE_LABEL)
479 if (rtl_dump_file)
480 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
481 bb->index, RBI (bb)->index);
483 label = emit_label_before (gen_label_rtx (), label);
484 if (bb->head == RBI (bb)->eff_head)
485 RBI (bb)->eff_head = label;
486 bb->head = label;
489 return label;
493 /* Emit a jump to BB after insn AFTER. */
495 static rtx
496 emit_jump_to_block_after (bb, after)
497 basic_block bb;
498 rtx after;
500 rtx jump;
502 if (bb != EXIT_BLOCK_PTR)
504 rtx label = label_for_bb (bb);
505 jump = emit_jump_insn_after (gen_jump (label), after);
506 JUMP_LABEL (jump) = label;
507 LABEL_NUSES (label) += 1;
509 if (rtl_dump_file)
510 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
511 bb->index, RBI (bb)->index);
513 else
515 #ifdef HAVE_return
516 if (! HAVE_return)
517 abort ();
518 jump = emit_jump_insn_after (gen_return (), after);
520 if (rtl_dump_file)
521 fprintf (rtl_dump_file, "Emitting return\n");
522 #else
523 abort ();
524 #endif
527 return jump;
531 /* Given a reorder chain, rearrange the code to match. */
533 static void
534 fixup_reorder_chain ()
536 basic_block bb, last_bb;
538 /* First do the bulk reordering -- rechain the blocks without regard to
539 the needed changes to jumps and labels. */
541 last_bb = BASIC_BLOCK (0);
542 bb = RBI (last_bb)->next;
543 while (bb)
545 rtx last_e = RBI (last_bb)->eff_end;
546 rtx curr_h = RBI (bb)->eff_head;
548 NEXT_INSN (last_e) = curr_h;
549 PREV_INSN (curr_h) = last_e;
551 last_bb = bb;
552 bb = RBI (bb)->next;
554 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
555 set_last_insn (RBI (last_bb)->eff_end);
557 /* Now add jumps and labels as needed to match the blocks new
558 outgoing edges. */
560 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
562 edge e_fall, e_taken, e;
563 rtx jump_insn, barrier_insn, bb_end_insn;
564 basic_block nb;
566 if (bb->succ == NULL)
567 continue;
569 /* Find the old fallthru edge, and another non-EH edge for
570 a taken jump. */
571 e_taken = e_fall = NULL;
572 for (e = bb->succ; e ; e = e->succ_next)
573 if (e->flags & EDGE_FALLTHRU)
574 e_fall = e;
575 else if (! (e->flags & EDGE_EH))
576 e_taken = e;
578 bb_end_insn = bb->end;
579 if (GET_CODE (bb_end_insn) == JUMP_INSN)
581 if (any_uncondjump_p (bb_end_insn))
583 /* If the destination is still not next, nothing to do. */
584 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
585 continue;
587 /* Otherwise, we can remove the jump and cleanup the edge. */
588 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
589 RBI (bb)->eff_end = skip_insns_after_block (bb);
590 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
592 if (rtl_dump_file)
593 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
594 bb->index, RBI (bb)->index);
595 continue;
597 else if (any_condjump_p (bb_end_insn))
599 /* If the old fallthru is still next, nothing to do. */
600 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
601 || (RBI (bb)->index == n_basic_blocks - 1
602 && e_fall->dest == EXIT_BLOCK_PTR))
603 continue;
605 /* There is one special case: if *neither* block is next,
606 such as happens at the very end of a function, then we'll
607 need to add a new unconditional jump. Choose the taken
608 edge based on known or assumed probability. */
609 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
611 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
612 if (note
613 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
614 && invert_jump (bb_end_insn,
615 label_for_bb (e_fall->dest), 0))
617 e_fall->flags &= ~EDGE_FALLTHRU;
618 e_taken->flags |= EDGE_FALLTHRU;
619 e = e_fall, e_fall = e_taken, e_taken = e;
623 /* Otherwise we can try to invert the jump. This will
624 basically never fail, however, keep up the pretense. */
625 else if (invert_jump (bb_end_insn,
626 label_for_bb (e_fall->dest), 0))
628 e_fall->flags &= ~EDGE_FALLTHRU;
629 e_taken->flags |= EDGE_FALLTHRU;
630 continue;
633 else if (returnjump_p (bb_end_insn))
634 continue;
635 else
637 /* Otherwise we have some switch or computed jump. In the
638 99% case, there should not have been a fallthru edge. */
639 if (! e_fall)
640 continue;
641 #ifdef CASE_DROPS_THROUGH
642 /* Except for VAX. Since we didn't have predication for the
643 tablejump, the fallthru block should not have moved. */
644 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
645 continue;
646 bb_end_insn = skip_insns_after_block (bb);
647 #else
648 abort ();
649 #endif
652 else
654 /* No fallthru implies a noreturn function with EH edges, or
655 something similarly bizarre. In any case, we don't need to
656 do anything. */
657 if (! e_fall)
658 continue;
660 /* If the fallthru block is still next, nothing to do. */
661 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
662 || (RBI (bb)->index == n_basic_blocks - 1
663 && e_fall->dest == EXIT_BLOCK_PTR))
664 continue;
666 /* We need a new jump insn. If the block has only one outgoing
667 edge, then we can stuff the new jump insn in directly. */
668 if (bb->succ->succ_next == NULL)
670 e_fall->flags &= ~EDGE_FALLTHRU;
672 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
673 bb->end = jump_insn;
674 barrier_insn = emit_barrier_after (jump_insn);
675 RBI (bb)->eff_end = barrier_insn;
676 continue;
680 /* We got here if we need to add a new jump insn in a new block
681 across the edge e_fall. */
683 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
684 barrier_insn = emit_barrier_after (jump_insn);
686 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
687 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
689 nb = BASIC_BLOCK (n_basic_blocks - 1);
690 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
691 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
692 nb->local_set = 0;
694 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
695 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
697 nb->aux = xmalloc (sizeof (struct reorder_block_def));
698 RBI (nb)->eff_head = nb->head;
699 RBI (nb)->eff_end = barrier_insn;
700 RBI (nb)->scope = RBI (bb)->scope;
701 RBI (nb)->index = RBI (bb)->index + 1;
702 RBI (nb)->visited = 1;
703 RBI (nb)->next = RBI (bb)->next;
704 RBI (bb)->next = nb;
706 /* Link to new block. */
707 make_edge (NULL, nb, e_fall->dest, 0);
708 redirect_edge_succ (e_fall, nb);
710 /* Don't process this new block. */
711 bb = nb;
713 /* Fix subsequent reorder block indices to reflect new block. */
714 while ((nb = RBI (nb)->next) != NULL)
715 RBI (nb)->index += 1;
718 /* Put basic_block_info in the new order. */
719 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
721 bb->index = RBI (bb)->index;
722 BASIC_BLOCK (bb->index) = bb;
727 /* Perform sanity checks on the insn chain.
728 1. Check that next/prev pointers are consistent in both the forward and
729 reverse direction.
730 2. Count insns in chain, going both directions, and check if equal.
731 3. Check that get_last_insn () returns the actual end of chain. */
733 void
734 verify_insn_chain ()
736 rtx x,
737 prevx,
738 nextx;
739 int insn_cnt1,
740 insn_cnt2;
742 prevx = NULL;
743 insn_cnt1 = 1;
744 for (x = get_insns (); x; x = NEXT_INSN (x))
746 if (PREV_INSN (x) != prevx)
748 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
749 fprintf (stderr, "previous insn:\n");
750 debug_rtx (prevx);
751 fprintf (stderr, "current insn:\n");
752 debug_rtx (x);
753 abort ();
755 ++insn_cnt1;
756 prevx = x;
759 if (prevx != get_last_insn ())
761 fprintf (stderr, "last_insn corrupt.\n");
762 abort ();
765 nextx = NULL;
766 insn_cnt2 = 1;
767 for (x = get_last_insn (); x; x = PREV_INSN (x))
769 if (NEXT_INSN (x) != nextx)
771 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
772 fprintf (stderr, "current insn:\n");
773 debug_rtx (x);
774 fprintf (stderr, "next insn:\n");
775 debug_rtx (nextx);
776 abort ();
778 ++insn_cnt2;
779 nextx = x;
782 if (insn_cnt1 != insn_cnt2)
784 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
785 insn_cnt1, insn_cnt2);
786 abort ();
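/* Starting at insn X, scan forward for the next NOTE_INSN_BASIC_BLOCK note;
   return it, or NULL if none is found.  */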
790 static rtx
791 get_next_bb_note (x)
792 rtx x;
794 while (x)
796 if (NOTE_INSN_BASIC_BLOCK_P (x))
797 return x;
798 x = NEXT_INSN (x);
800 return NULL;
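/* Starting at insn X, scan backward for the previous NOTE_INSN_BASIC_BLOCK
   note; return it, or NULL if none is found.  */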
804 static rtx
805 get_prev_bb_note (x)
806 rtx x;
808 while (x)
810 if (NOTE_INSN_BASIC_BLOCK_P (x))
811 return x;
812 x = PREV_INSN (x);
814 return NULL;
818 /* Determine and record the relationships between basic blocks and
819 scopes in scope tree S. */
821 static void
822 relate_bbs_with_scopes (s)
823 scope s;
825 scope p;
826 int i, bbi1, bbi2, bbs_spanned;
827 rtx bbnote;
829 for (p = s->inner; p; p = p->next)
830 relate_bbs_with_scopes (p);
832 bbi1 = bbi2 = -1;
833 bbs_spanned = 0;
835 /* If the begin and end notes are both inside the same basic block,
836 or if they are both outside of basic blocks, then we know immediately
837 how they are related. Otherwise, we need to poke around to make the
838 determination. */
839 if (s->bb_beg != s->bb_end)
841 if (s->bb_beg && s->bb_end)
843 /* Both notes are in different bbs. This implies that all the
844 basic blocks spanned by the pair of notes are contained in
845 this scope. */
846 bbi1 = s->bb_beg->index;
847 bbi2 = s->bb_end->index;
848 bbs_spanned = 1;
850 else if (! s->bb_beg)
852 /* First note is outside of a bb. If the scope spans more than
853 one basic block, then they all are contained within this
854 scope. Otherwise, this scope is contained within the basic
855 block. */
856 bbnote = get_next_bb_note (s->note_beg);
857 if (! bbnote)
858 abort ();
859 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
861 bbs_spanned = 0;
862 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
864 else
866 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
867 bbi2 = s->bb_end->index;
868 s->bb_end = NULL;
869 bbs_spanned = 1;
872 else /* ! s->bb_end */
874 /* Second note is outside of a bb. If the scope spans more than
875 one basic block, then they all are contained within this
876 scope. Otherwise, this scope is contained within the basic
877 block. */
878 bbnote = get_prev_bb_note (s->note_end);
879 if (! bbnote)
880 abort ();
881 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
883 bbs_spanned = 0;
884 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
886 else
888 bbi1 = s->bb_beg->index;
889 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
890 s->bb_beg = NULL;
891 bbs_spanned = 1;
895 else
897 if (s->bb_beg)
898 /* Both notes are in the same bb, which implies the block
899 contains this scope. */
900 bbs_spanned = 0;
901 else
903 rtx x1, x2;
904 /* Both notes are outside of any bbs. This implies that all the
905 basic blocks spanned by the pair of notes are contained in
906 this scope.
907 There is a degenerate case to consider. If the notes do not
908 span any basic blocks, then it is an empty scope that can
909 safely be deleted or ignored. Mark these with level = -1. */
911 x1 = get_next_bb_note (s->note_beg);
912 x2 = get_prev_bb_note (s->note_end);
913 if (! (x1 && x2))
915 s->level = -1;
916 bbs_spanned = 0;
918 else
920 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
921 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
922 bbs_spanned = 1;
927 /* If the scope spans one or more basic blocks, we record them. We
928 only record the bbs that are immediately contained within this
929 scope. Note that if a scope is contained within a bb, we can tell
930 by checking that bb_beg = bb_end and that they are non-null. */
931 if (bbs_spanned)
933 int j = 0;
935 s->num_bbs = 0;
936 for (i = bbi1; i <= bbi2; i++)
937 if (! RBI (BASIC_BLOCK (i))->scope)
938 s->num_bbs++;
940 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
941 for (i = bbi1; i <= bbi2; i++)
943 basic_block curr_bb = BASIC_BLOCK (i);
944 if (! RBI (curr_bb)->scope)
946 s->bbs[j++] = curr_bb;
947 RBI (curr_bb)->scope = s;
951 else
952 s->num_bbs = 0;
956 /* Allocate and initialize a new scope structure with scope level LEVEL,
957 and record the NOTE beginning the scope. */
959 static scope
960 make_new_scope (level, note)
961 int level;
962 rtx note;
964 scope new_scope = xcalloc (1, sizeof (struct scope_def));
965 new_scope->level = level;
966 new_scope->note_beg = note;
967 return new_scope;
971 /* Build a forest representing the scope structure of the function.
972 Return a pointer to a structure describing the forest. */
974 static void
975 build_scope_forest (forest)
976 scope_forest_info *forest;
978 rtx x;
979 int level, bbi, i;
980 basic_block curr_bb;
981 scope root, curr_scope = 0;
983 forest->num_trees = 0;
984 forest->trees = NULL;
985 level = -1;
986 root = NULL;
987 curr_bb = NULL;
988 bbi = 0;
989 for (x = get_insns (); x; x = NEXT_INSN (x))
991 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
992 curr_bb = BASIC_BLOCK (bbi);
994 if (GET_CODE (x) == NOTE)
996 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
998 if (root)
1000 scope new_scope;
1001 if (! curr_scope)
1002 abort();
1003 level++;
1004 new_scope = make_new_scope (level, x);
1005 new_scope->outer = curr_scope;
1006 new_scope->next = NULL;
1007 if (! curr_scope->inner)
1009 curr_scope->inner = new_scope;
1010 curr_scope->inner_last = new_scope;
1012 else
1014 curr_scope->inner_last->next = new_scope;
1015 curr_scope->inner_last = new_scope;
1017 curr_scope = curr_scope->inner_last;
1019 else
1021 int ntrees = forest->num_trees;
1022 level++;
1023 curr_scope = make_new_scope (level, x);
1024 root = curr_scope;
1025 forest->trees = xrealloc (forest->trees,
1026 sizeof (scope) * (ntrees + 1));
1027 forest->trees[forest->num_trees++] = root;
1029 curr_scope->bb_beg = curr_bb;
1031 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1033 curr_scope->bb_end = curr_bb;
1034 curr_scope->note_end = x;
1035 level--;
1036 curr_scope = curr_scope->outer;
1037 if (level == -1)
1038 root = NULL;
1040 } /* if note */
1042 if (curr_bb && curr_bb->end == x)
1044 curr_bb = NULL;
1045 bbi++;
1048 } /* for */
1050 for (i = 0; i < forest->num_trees; i++)
1051 relate_bbs_with_scopes (forest->trees[i]);
1055 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1056 the insn chain. */
1058 static void
1059 remove_scope_notes ()
1061 rtx x, next;
1062 basic_block currbb = NULL;
1064 for (x = get_insns (); x; x = next)
1066 next = NEXT_INSN (x);
1067 if (NOTE_INSN_BASIC_BLOCK_P (x))
1068 currbb = NOTE_BASIC_BLOCK (x);
1070 if (GET_CODE (x) == NOTE
1071 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1072 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1074 /* Check if the scope note happens to be the end of a bb. */
1075 if (currbb && x == currbb->end)
1076 currbb->end = PREV_INSN (x);
1077 if (currbb && x == currbb->head)
1078 abort ();
1080 if (PREV_INSN (x))
1082 NEXT_INSN (PREV_INSN (x)) = next;
1083 PREV_INSN (next) = PREV_INSN (x);
1085 NEXT_INSN (x) = NULL;
1086 PREV_INSN (x) = NULL;
1088 else
1089 abort ();
1095 /* Insert scope note pairs for a contained scope tree S after insn IP. */
1097 static void
1098 insert_intra_1 (s, ip)
1099 scope s;
1100 rtx *ip;
1102 scope p;
1104 if (NOTE_BLOCK (s->note_beg))
1106 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1107 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1110 for (p = s->inner; p; p = p->next)
1111 insert_intra_1 (p, ip);
1113 if (NOTE_BLOCK (s->note_beg))
1115 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1116 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1121 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1122 scopes that are contained within BB. */
1124 static void
1125 insert_intra_bb_scope_notes (bb)
1126 basic_block bb;
1128 scope s = RBI (bb)->scope;
1129 scope p;
1130 rtx ip;
1132 if (! s)
1133 return;
1135 ip = bb->head;
1136 if (GET_CODE (ip) == CODE_LABEL)
1137 ip = NEXT_INSN (ip);
1139 for (p = s->inner; p; p = p->next)
1141 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1142 insert_intra_1 (p, &ip);
1147 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1148 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1149 notes before BB2 such that the notes are correctly balanced. If BB1 or
1150 BB2 is NULL, we are inserting scope notes for the first and last basic
1151 blocks, respectively. */
1153 static void
1154 insert_inter_bb_scope_notes (bb1, bb2)
1155 basic_block bb1;
1156 basic_block bb2;
1158 rtx ip;
1159 scope com;
1161 /* It is possible that a basic block is not contained in any scope.
1162 In that case, we either open or close a scope but not both. */
1163 if (bb1 && bb2)
1165 scope s1 = RBI (bb1)->scope;
1166 scope s2 = RBI (bb2)->scope;
1167 if (! s1 && ! s2)
1168 return;
1169 if (! s1)
1170 bb1 = NULL;
1171 else if (! s2)
1172 bb2 = NULL;
1175 /* Find common ancestor scope. */
1176 if (bb1 && bb2)
1178 scope s1 = RBI (bb1)->scope;
1179 scope s2 = RBI (bb2)->scope;
1180 while (s1 != s2)
1182 if (! (s1 && s2))
1183 abort ();
1184 if (s1->level > s2->level)
1185 s1 = s1->outer;
1186 else if (s2->level > s1->level)
1187 s2 = s2->outer;
1188 else
1190 s1 = s1->outer;
1191 s2 = s2->outer;
1194 com = s1;
1196 else
1197 com = NULL;
1199 /* Close scopes. */
1200 if (bb1)
1202 scope s = RBI (bb1)->scope;
1203 ip = RBI (bb1)->eff_end;
1204 while (s != com)
1206 if (NOTE_BLOCK (s->note_beg))
1208 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1209 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1211 s = s->outer;
1215 /* Open scopes. */
1216 if (bb2)
1218 scope s = RBI (bb2)->scope;
1219 ip = bb2->head;
1220 while (s != com)
1222 if (NOTE_BLOCK (s->note_beg))
1224 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1225 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1227 s = s->outer;
1233 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1234 on the scope forest and the newly reordered basic blocks. */
1236 static void
1237 rebuild_scope_notes (forest)
1238 scope_forest_info *forest;
1240 int i;
1242 if (forest->num_trees == 0)
1243 return;
1245 /* Start by opening the scopes before the first basic block. */
1246 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1248 /* Then, open and close scopes as needed between blocks. */
1249 for (i = 0; i < n_basic_blocks - 1; i++)
1251 basic_block bb1 = BASIC_BLOCK (i);
1252 basic_block bb2 = BASIC_BLOCK (i + 1);
1253 if (RBI (bb1)->scope != RBI (bb2)->scope)
1254 insert_inter_bb_scope_notes (bb1, bb2);
1255 insert_intra_bb_scope_notes (bb1);
1258 /* Finally, close the scopes after the last basic block. */
1259 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1260 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1264 /* Free the storage associated with the scope tree at S. */
1266 static void
1267 free_scope_forest_1 (s)
1268 scope s;
1270 scope p, next;
1272 for (p = s->inner; p; p = next)
1274 next = p->next;
1275 free_scope_forest_1 (p);
1278 if (s->bbs)
1279 free (s->bbs);
1280 free (s);
1284 /* Free the storage associated with the scope forest. */
1286 static void
1287 free_scope_forest (forest)
1288 scope_forest_info *forest;
1290 int i;
1291 for (i = 0; i < forest->num_trees; i++)
1292 free_scope_forest_1 (forest->trees[i]);
1296 /* Visualize the scope forest. */
1298 void
1299 dump_scope_forest (forest)
1300 scope_forest_info *forest;
1302 if (forest->num_trees == 0)
1303 fprintf (stderr, "\n< Empty scope forest >\n");
1304 else
1306 int i;
1307 fprintf (stderr, "\n< Scope forest >\n");
1308 for (i = 0; i < forest->num_trees; i++)
1309 dump_scope_forest_1 (forest->trees[i], 0);
1314 /* Recursive portion of dump_scope_forest. */
1316 static void
1317 dump_scope_forest_1 (s, indent)
1318 scope s;
1319 int indent;
1321 scope p;
1322 int i;
1324 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1325 && RBI (s->bb_beg)->scope
1326 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1328 fprintf (stderr, "%*s", indent, "");
1329 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1332 fprintf (stderr, "%*s", indent, "");
1333 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1334 (PTR) NOTE_BLOCK (s->note_beg));
1336 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1337 for (i = 0; i < s->num_bbs; i++)
1338 fprintf (stderr, " %d", s->bbs[i]->index);
1339 fprintf (stderr, "\n");
1341 for (p = s->inner; p; p = p->next)
1342 dump_scope_forest_1 (p, indent + 2);
1344 fprintf (stderr, "%*s", indent, "");
1345 fprintf (stderr, "}\n");
1349 /* Reorder basic blocks. The main entry point to this file. */
1351 void
1352 reorder_basic_blocks ()
1354 scope_forest_info forest;
1355 int i;
1357 if (n_basic_blocks <= 1)
1358 return;
1360 /* We do not currently handle correct re-placement of EH notes.
1361 But that does not matter unless we intend to use them. */
1362 if (flag_exceptions)
1363 for (i = 0; i < n_basic_blocks; i++)
1365 edge e;
1366 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1367 if (e->flags & EDGE_EH)
1368 return;
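  /* Attach per-block reordering information to every block, including the
     exit block.  */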
1371 for (i = 0; i < n_basic_blocks; i++)
1372 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1374 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
1376 build_scope_forest (&forest);
1377 remove_scope_notes ();
1379 record_effective_endpoints ();
1380 make_reorder_chain ();
1381 fixup_reorder_chain ();
1383 #ifdef ENABLE_CHECKING
1384 verify_insn_chain ();
1385 #endif
1387 rebuild_scope_notes (&forest);
1388 free_scope_forest (&forest);
1389 reorder_blocks ();
1391 for (i = 0; i < n_basic_blocks; i++)
1392 free (BASIC_BLOCK (i)->aux);
1394 free (EXIT_BLOCK_PTR->aux);
1396 #ifdef ENABLE_CHECKING
1397 verify_flow_info ();
1398 #endif