gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
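/* For example, given the insn chain A <-> B <-> C <-> D, calling
   unlink_insn_chain (B, C) leaves A <-> D in the function's insn stream
   and returns B, the head of the detached sub-chain B <-> C.  */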
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	    case NOTE_INSN_LOOP_END:
	    case NOTE_INSN_BLOCK_END:
	      last_insn = insn;
	      continue;
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      continue;

	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  if (NEXT_INSN (insn)
	      && JUMP_P (NEXT_INSN (insn))
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_LINE_NUMBER (insn))
	  {
	  case NOTE_INSN_LOOP_END:
	  case NOTE_INSN_BLOCK_END:
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
	fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->rbi->header = unlink_insn_chain (next_insn,
					     PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
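/* Once record_effective_endpoints has run, every insn that does not belong
   to a basic block lives either in cfg_layout_function_header,
   cfg_layout_function_footer, or in some block's rbi->header/rbi->footer
   chain; fixup_reorder_chain later splices these pieces back around the
   blocks in their new order.  */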
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks array contains the scope block that is used for all
   insn locators greater than or equal to the corresponding block_locators_locs
   value and smaller than the following one.  Similarly for the other
   properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;
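/* For instance, block_locators_locs = {1, 5} paired with
   block_locators_blocks = {A, B} means that locators 1 through 4 map to
   scope block A and locators from 5 onwards map to scope block B.  */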
/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
	{
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
	  if (NOTE_LINE_NUMBER (insn) > 0)
	    {
	      expanded_location xloc;
	      NOTE_EXPANDED_LOCATION (xloc, insn);
	      line_number = xloc.line;
	      file_name = xloc.file;
	    }
	}
      else
	active = (active_insn_p (insn)
		  && GET_CODE (PATTERN (insn)) != ADDR_VEC
		  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      if (active
	  || !next
	  || (!prologue_locator && file_name))
	{
	  if (last_block != block)
	    {
	      loc++;
	      VARRAY_PUSH_INT (block_locators_locs, loc);
	      VEC_safe_push (tree, gc, block_locators_blocks, block);
	      last_block = block;
	    }
	  if (last_line_number != line_number)
	    {
	      loc++;
	      VARRAY_PUSH_INT (line_locators_locs, loc);
	      VARRAY_PUSH_INT (line_locators_lines, line_number);
	      last_line_number = line_number;
	    }
	  if (last_file_name != file_name)
	    {
	      loc++;
	      VARRAY_PUSH_INT (file_locators_locs, loc);
	      VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
	      last_file_name = file_name;
	    }
	  if (!prologue_locator && file_name)
	    prologue_locator = loc;
	  if (!next)
	    epilogue_locator = loc;
	  if (active)
	    INSN_LOCATOR (insn) = loc;
	}
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
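/* For example, moving from a scope nested three levels deep to a sibling
   scope two levels deep emits NOTE_INSN_BLOCK_END notes for the blocks left
   on the way up to the common ancestor, followed by NOTE_INSN_BLOCK_BEG
   notes for the blocks entered on the way down to S2.  */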
/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
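/* insn_scope above and locator_line/locator_file below share the same
   bisection scheme: the loop narrows [min, max) down to the largest index
   whose recorded locator is still <= LOC, i.e. the entry that was in force
   when LOC was assigned.  */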
/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->rbi->header;
	  else
	    set_first_insn (bb->rbi->header);
	  PREV_INSN (bb->rbi->header) = insn;
	  insn = bb->rbi->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
	{
	  NEXT_INSN (insn) = bb->rbi->footer;
	  PREV_INSN (bb->rbi->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->rbi->next == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerate case of a conditional jump jumping to the
		 next instruction can happen on targets having jumps with
		 side effects.

		 Create temporarily the duplicated edge representing the
		 branch.  It will be cleaned up again by
		 force_nonfallthru_and_redirect, which would otherwise get
		 confused by a fallthru edge not pointing to the next basic
		 block.  */
	      if (!e_taken)
		{
		  rtx note;
		  edge e_fake;
		  bool redirected;

		  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

		  redirected = redirect_jump (BB_END (bb),
					      block_label (bb), 0);
		  gcc_assert (redirected);

		  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
		  if (note)
		    {
		      int prob = INTVAL (XEXP (note, 0));

		      e_fake->probability = prob;
		      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
		      e_fall->probability -= e_fall->probability;
		      e_fall->count -= e_fake->count;
		      if (e_fall->probability < 0)
			e_fall->probability = 0;
		      if (e_fall->count < 0)
			e_fall->count = 0;
		    }
		}
	      /* There is one special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->rbi->next != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->rbi->next == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  initialize_bb_rbi (nb);
	  nb->rbi->visited = 1;
	  nb->rbi->next = bb->rbi->next;
	  bb->rbi->next = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall through across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
	      (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	}
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
	   bb;
	   bb = bb->rbi->next, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  initialize_bb_rbi (bb);
	  bb->rbi->next = c->rbi->next;
	  c->rbi->next = bb;
	  bb->rbi->footer = c->rbi->footer;
	  c->rbi->footer = NULL;
	}

      while (c->rbi->next != bb)
	c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
	c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}
/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  emit_copy_of_insn_after (insn, get_last_insn ());
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_LINE_NUMBER (insn))
	    {
	      /* In case the prologue is empty and the function contains a
		 label in its first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_LOOP_BEG:
	    case NOTE_INSN_LOOP_END:
	      /* Strip down the loop notes - we don't really want to keep
		 them consistent in loop copies.  */
	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_FUNCTION_END:
	      /* Debug code expects these notes to exist just once.
		 Keep them in the master copy.
		 ??? It probably makes more sense to duplicate them for each
		 epilogue copy.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_REPEATED_LINE_NUMBER:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

	      /* It is possible that no_line_number is set and the note
		 won't be emitted.  */
	      emit_note_copy (insn);
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
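/* Note that the copy produced by duplicate_insn_chain is emitted at the end
   of the current insn chain; the returned insn is the first insn of that
   copy (or NULL if nothing was copied), and cfg_layout_duplicate_bb below
   turns it into a proper block.  */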
/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
	new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
	new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  return new_bb;
}
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    initialize_bb_rbi (bb);

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
	bb->flags &= ~BB_SUPERBLOCK;
	SET_BIT (superblocks, bb->index);
	need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  free_original_copy_tables ();
}
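/* A pass that wants to work on the cfglayout representation typically
   brackets its transformations with the two entry points above, roughly:

     cfg_layout_initialize (0);
     ... reorder the bb->rbi->next chains, duplicate or redirect blocks ...
     cfg_layout_finalize ();

   (A sketch only; the flags passed to cfg_layout_initialize depend on the
   caller, see the comment above cfg_layout_initialize.)  */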
/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* In case we would need to redirect an abnormal edge during
	 duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
	if ((e->flags & EDGE_ABNORMAL)
	    && (e->dest->flags & BB_DUPLICATED))
	  {
	    ret = false;
	    goto end;
	  }

      if (!can_duplicate_block_p (bbs[i]))
	{
	  ret = false;
	  break;
	}
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to the appropriate newly created block.
   The function assigns bbs into loops (the copy of basic block bb is assigned
   to the bb->loop_father->copy loop, so this must be set up correctly in
   advance) and updates dominators locally (the LOOPS structure that contains
   the information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL);
      bb->flags |= BB_DUPLICATED;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
#include "gt-cfglayout.h"