gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Pool from which the per-basic-block reordering information (rbi)
   is allocated.  */
alloc_pool cfg_layout_pool;

/* Holds the interesting leading and trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static rtx duplicate_insn_chain (rtx, rtx);
static void break_superblocks (void);
static tree insn_scope (rtx);

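/* Unlink the insns from FIRST to LAST (inclusive) out of the insn chain,
   patching up the neighbouring insns, and return FIRST.  */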
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */
static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = bb->next_bb->head;

  for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case reorder the notes.  */
  for (insn = last_insn; insn != bb->end; insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = bb->head;

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  if (!insn)
    abort ();  /* No basic blocks at all?  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (bb->head) && next_insn != bb->head)
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (bb->head));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (bb->end) && bb->end != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
      next_insn = NEXT_INSN (bb->end);
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATORs into scope
   blocks, line numbers and file names.  In order to be GGC friendly we need
   to use separate varrays.  This also slightly improves memory locality in
   the binary search.  The _locs arrays contain the locators at which the
   given property changes.  The block_locators_blocks array contains the
   scope block that is used for all insn locators greater than the
   corresponding block_locators_locs value and smaller than the following
   one.  Similarly for the other properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if ((active_insn_p (insn)
           && GET_CODE (PATTERN (insn)) != ADDR_VEC
           && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
          || !NEXT_INSN (insn)
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
              last_file_name = file_name;
            }
        }
      if (!prologue_locator && file_name)
        prologue_locator = loc;
      if (!NEXT_INSN (insn))
        epilogue_locator = loc;
      if (active_insn_p (insn))
        INSN_LOCATOR (insn) = loc;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              if (block && TREE_CODE (block) == FUNCTION_DECL)
                block = 0;
              delete_insn (insn);
              break;
            default:
              if (NOTE_LINE_NUMBER (insn) > 0)
                {
                  line_number = NOTE_LINE_NUMBER (insn);
                  file_name = (char *) NOTE_SOURCE_FILE (insn);
                }
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from the combination of S1 and S2.  */
tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the prologue and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences, compute the scope resulting from merging all scopes
         of the instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = bb->head;
      else
        set_first_insn (bb->head);
      PREV_INSN (bb->head) = insn;
      insn = bb->end;
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();
  /* Now add jumps and labels as needed to match the block's new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = bb->end;
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || (!bb->rbi->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets that have jumps with
                 side effects.

                 Temporarily create the duplicated edge representing the
                 branch.  It will get unified by force_nonfallthru_and_redirect,
                 which would otherwise be confused by a fallthru edge not
                 pointing to the next basic block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  if (!redirect_jump (bb->end, block_label (bb), 0))
                    abort ();
                  note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fall->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to the exit block.  */
          if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          cfg_layout_initialize_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }
  /* Put basic_block_info in the new order.  */

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = bb->rbi->next, index++)
        {
          fprintf (rtl_dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (rtl_dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
            fprintf (rtl_dump_file, "compensation ");
          else
            fprintf (rtl_dump_file, "bb %i ", bb->index);
          fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
        continue;
      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating a fallthru block to the exit block would require adding
     a jump and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (bb->end, NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = bb->head;
      while (1)
        {
          if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
            return false;
          if (insn == bb->end)
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

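/* Create a copy of the insns from FROM to TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  Dispatch
   tables, code labels and most notes are deliberately not copied.  */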
static rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying dispatch tables.  We never duplicate
             tablejumps, so this can only be hit if the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in its first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose in duplicating the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while BB
                 reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note_copy (insn);
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB and redirect edge E into it.
   If E is not specified, BB is just copied, but updating the frequencies
   etc. is left to the caller.  */

basic_block
cfg_layout_duplicate_bb (basic_block bb, edge e)
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (bb->head, bb->end);
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
         of another block (which therefore are known to be disjoint), there
         is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (e && bb->count)
        {
          /* Take care for overflows!  */
          n->count = s->count * (new_count * 10000 / bb->count) / 10000;
          s->count -= n->count;
        }
      else
        n->count = s->count;
      n->aux = s->aux;
    }

  if (e)
    {
      new_bb->count = new_count;
      bb->count -= new_count;

      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      redirect_edge_and_branch_force (e, new_bb);

      if (bb->count < 0)
        bb->count = 0;
      if (bb->frequency < 0)
        bb->frequency = 0;
    }
  else
    {
      new_bb->count = bb->count;
      new_bb->frequency = bb->frequency;
    }

  new_bb->rbi->original = bb;
  bb->rbi->copy = new_bb;

  return new_bb;
}

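/* Allocate and zero the reorder_block_def auxiliary info (rbi) for BB;
   abort if BB already has one.  */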
void
cfg_layout_initialize_rbi (basic_block bb)
{
  if (bb->rbi)
    abort ();
  bb->rbi = pool_alloc (cfg_layout_pool);
  memset (bb->rbi, 0, sizeof (struct reorder_block_def));
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  */

void
cfg_layout_initialize (void)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  cfg_layout_pool =
    create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
                       n_basic_blocks + 2);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    cfg_layout_initialize_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT);
}

/* Splits superblocks.  */
static void
break_superblocks (void)
{
  sbitmap superblocks;
  int i, need;

  superblocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (superblocks);

  need = 0;

  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->flags & BB_SUPERBLOCK)
      {
        BASIC_BLOCK (i)->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, i);
        need = 1;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder the insn list according to the sequence,
   enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_alloc_pool (cfg_layout_pool);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in the BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect an abnormal edge during duplication, fail.  */
      for (e = bbs[i]->succ; e; e = e->succ_next)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   the copied blocks to loops (the copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (the LOOPS structure containing the
   dominator information is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base, struct loops *loops)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      add_to_dominance_info (loops->cfg.dom, new_bb);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (loops->cfg.dom, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (loops->cfg.dom, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      new_bb = new_bbs[i];
      bb = bbs[i];

      for (e = new_bb->succ; e; e = e->succ_next)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}

#include "gt-cfglayout.h"