/* Extraction residue from the git web viewer, preserved as a comment:
   ChangeLog fragment: doc/install.texi: Update binutils requirement for powerpc*-linux.
   [official-gcc.git] / gcc / cfglayout.c
   blob 655c9a1b397dd9b4d8121e1b93bfb2b7b6cb7240  */
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
40 /* Holds the interesting trailing notes for the function. */
/* cfg_layout_function_header likewise holds the leading insns (everything
   before the first basic block).  Both are filled in by
   record_effective_endpoints and spliced back by fixup_reorder_chain.  */
41 rtx cfg_layout_function_footer, cfg_layout_function_header;
/* Forward declarations for the file-local helpers defined below.  */
43 static rtx skip_insns_after_block (basic_block);
44 static void record_effective_endpoints (void);
45 static rtx label_for_bb (basic_block);
46 static void fixup_reorder_chain (void);
48 static void set_block_levels (tree, int);
49 static void change_scope (rtx, tree, tree);
/* Exported: also used under ENABLE_CHECKING from other files.  */
51 void verify_insn_chain (void);
52 static void fixup_fallthru_exit_predecessor (void);
53 static tree insn_scope (rtx);
54 static void update_unlikely_executed_notes (basic_block);
56 rtx
57 unlink_insn_chain (rtx first, rtx last)
59 rtx prevfirst = PREV_INSN (first);
60 rtx nextlast = NEXT_INSN (last);
62 PREV_INSN (first) = NULL;
63 NEXT_INSN (last) = NULL;
64 if (prevfirst)
65 NEXT_INSN (prevfirst) = nextlast;
66 if (nextlast)
67 PREV_INSN (nextlast) = prevfirst;
68 else
69 set_last_insn (prevfirst);
70 if (!prevfirst)
71 set_first_insn (nextlast);
72 return first;
75 /* Skip over inter-block insns occurring after BB which are typically
76 associated with BB (e.g., barriers). If there are any such insns,
77 we return the last one. Otherwise, we return the end of BB. */
79 static rtx
80 skip_insns_after_block (basic_block bb)
82 rtx insn, last_insn, next_head, prev;
84 next_head = NULL_RTX;
85 if (bb->next_bb != EXIT_BLOCK_PTR)
86 next_head = BB_HEAD (bb->next_bb);
/* Scan forward from BB's end, stopping at the head of the next block
   (if any).  LAST_INSN tracks the last insn seen that should stay
   attached to BB.  */
88 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
90 if (insn == next_head)
91 break;
93 switch (GET_CODE (insn))
/* A barrier after BB belongs to BB.  */
95 case BARRIER:
96 last_insn = insn;
97 continue;
99 case NOTE:
100 switch (NOTE_LINE_NUMBER (insn))
/* Loop/block end notes are claimed by BB ...  */
102 case NOTE_INSN_LOOP_END:
103 case NOTE_INSN_BLOCK_END:
104 last_insn = insn;
105 continue;
/* ... deleted notes are skipped without extending the claimed range.  */
106 case NOTE_INSN_DELETED:
107 case NOTE_INSN_DELETED_LABEL:
108 continue;
110 default:
111 continue;
/* NOTE(review): this break is unreachable after the preceding
   continue; looks like leftover code — confirm against upstream.  */
112 break;
114 break;
/* A code label immediately followed by a jump table belongs with BB;
   claim both and keep scanning.  */
116 case CODE_LABEL:
117 if (NEXT_INSN (insn)
118 && JUMP_P (NEXT_INSN (insn))
119 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
120 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
122 insn = NEXT_INSN (insn);
123 last_insn = insn;
124 continue;
126 break;
128 default:
129 break;
/* Any other insn kind terminates the forward scan.  */
132 break;
135 /* It is possible to hit contradictory sequence. For instance:
137 jump_insn
138 NOTE_INSN_LOOP_BEG
139 barrier
141 Where barrier belongs to jump_insn, but the note does not. This can be
142 created by removing the basic block originally following
143 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Walk backward from LAST_INSN to BB's end, moving any interloping
   notes after LAST_INSN so the claimed range stays contiguous.  */
145 for (insn = last_insn; insn != BB_END (bb); insn = prev)
147 prev = PREV_INSN (insn);
148 if (NOTE_P (insn))
149 switch (NOTE_LINE_NUMBER (insn))
151 case NOTE_INSN_LOOP_END:
152 case NOTE_INSN_BLOCK_END:
153 case NOTE_INSN_DELETED:
154 case NOTE_INSN_DELETED_LABEL:
155 continue;
156 default:
157 reorder_insns (insn, insn, last_insn);
161 return last_insn;
164 /* Locate or create a label for a given basic block. */
166 static rtx
167 label_for_bb (basic_block bb)
169 rtx label = BB_HEAD (bb);
171 if (!LABEL_P (label))
173 if (dump_file)
174 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
176 label = block_label (bb);
179 return label;
182 /* Locate the effective beginning and end of the insn chain for each
183 block, as defined by skip_insns_after_block above. */
185 static void
186 record_effective_endpoints (void)
188 rtx next_insn;
189 basic_block bb;
190 rtx insn;
/* Skip leading notes up to (but not including) the first
   NOTE_INSN_BASIC_BLOCK.  */
192 for (insn = get_insns ();
193 insn
194 && NOTE_P (insn)
195 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
196 insn = NEXT_INSN (insn))
197 continue;
198 /* No basic blocks at all? */
199 gcc_assert (insn);
/* Everything before the first block becomes the function header.  */
201 if (PREV_INSN (insn))
202 cfg_layout_function_header =
203 unlink_insn_chain (get_insns (), PREV_INSN (insn));
204 else
205 cfg_layout_function_header = NULL_RTX;
207 next_insn = get_insns ();
208 FOR_EACH_BB (bb)
210 rtx end;
/* Insns between the previous block's claimed range and BB's head are
   BB's header ...  */
212 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
213 bb->rbi->header = unlink_insn_chain (next_insn,
214 PREV_INSN (BB_HEAD (bb)));
/* ... and insns after BB's end that skip_insns_after_block claims
   become BB's footer.  */
215 end = skip_insns_after_block (bb);
216 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
217 bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
218 next_insn = NEXT_INSN (BB_END (bb));
/* Whatever remains after the last block is the function footer.  */
221 cfg_layout_function_footer = next_insn;
222 if (cfg_layout_function_footer)
223 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
226 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
227 numbers and files. In order to be GGC friendly we need to use separate
228 varrays. This also slightly improve the memory locality in binary search.
229 The _locs array contains locators where the given property change. The
230 block_locators_blocks contains the scope block that is used for all insn
231 locator greater than corresponding block_locators_locs value and smaller
232 than the following one. Similarly for the other properties. */
233 static GTY(()) varray_type block_locators_locs;
234 static GTY(()) varray_type block_locators_blocks;
235 static GTY(()) varray_type line_locators_locs;
236 static GTY(()) varray_type line_locators_lines;
237 static GTY(()) varray_type file_locators_locs;
238 static GTY(()) varray_type file_locators_files;
/* Locators assigned to the function prologue and epilogue; both are 0
   until insn_locators_initialize runs (see insn_scope for their use).  */
239 int prologue_locator;
240 int epilogue_locator;
242 /* During the RTL expansion the lexical blocks and line numbers are
243 represented via INSN_NOTEs. Replace them by representation using
244 INSN_LOCATORs. */
246 void
247 insn_locators_initialize (void)
249 tree block = NULL;
250 tree last_block = NULL;
251 rtx insn, next;
252 int loc = 0;
253 int line_number = 0, last_line_number = 0;
254 const char *file_name = NULL, *last_file_name = NULL;
256 prologue_locator = epilogue_locator = 0;
258 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
259 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
260 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
261 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
262 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
263 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
265 for (insn = get_insns (); insn; insn = next)
/* ACTIVE is nonzero for insns that will receive a locator (real insns,
   excluding jump tables).  */
267 int active = 0;
269 next = NEXT_INSN (insn);
271 if (NOTE_P (insn))
273 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
274 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
/* Positive note "line numbers" carry source-location payloads.  */
275 if (NOTE_LINE_NUMBER (insn) > 0)
277 expanded_location xloc;
278 NOTE_EXPANDED_LOCATION (xloc, insn);
279 line_number = xloc.line;
280 file_name = xloc.file;
283 else
284 active = (active_insn_p (insn)
285 && GET_CODE (PATTERN (insn)) != ADDR_VEC
286 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
288 check_block_change (insn, &block);
/* Bump LOC and record the changed property whenever the lexical block,
   line or file differs from the last recorded state.  The extra
   conditions force a record at the final insn and at the first insn
   with file info (for the epilogue/prologue locators below).  */
290 if (active
291 || !next
292 || (!prologue_locator && file_name))
294 if (last_block != block)
296 loc++;
297 VARRAY_PUSH_INT (block_locators_locs, loc);
298 VARRAY_PUSH_TREE (block_locators_blocks, block);
299 last_block = block;
301 if (last_line_number != line_number)
303 loc++;
304 VARRAY_PUSH_INT (line_locators_locs, loc);
305 VARRAY_PUSH_INT (line_locators_lines, line_number);
306 last_line_number = line_number;
308 if (last_file_name != file_name)
310 loc++;
311 VARRAY_PUSH_INT (file_locators_locs, loc);
312 VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
313 last_file_name = file_name;
315 if (!prologue_locator && file_name)
316 prologue_locator = loc;
317 if (!next)
318 epilogue_locator = loc;
319 if (active)
320 INSN_LOCATOR (insn) = loc;
324 /* Tag the blocks with a depth number so that change_scope can find
325 the common parent easily. */
326 set_block_levels (DECL_INITIAL (cfun->decl), 0);
328 free_block_changes ();
331 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
332 found in the block tree. */
334 static void
335 set_block_levels (tree block, int level)
337 while (block)
339 BLOCK_NUMBER (block) = level;
340 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
341 block = BLOCK_CHAIN (block);
345 /* Return sope resulting from combination of S1 and S2. */
346 static tree
347 choose_inner_scope (tree s1, tree s2)
349 if (!s1)
350 return s2;
351 if (!s2)
352 return s1;
353 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
354 return s1;
355 return s2;
358 /* Emit lexical block notes needed to change scope from S1 to S2. */
360 static void
361 change_scope (rtx orig_insn, tree s1, tree s2)
363 rtx insn = orig_insn;
364 tree com = NULL_TREE;
365 tree ts1 = s1, ts2 = s2;
366 tree s;
/* Walk both scopes up toward the root until they meet; the deeper one
   (larger BLOCK_NUMBER, set by set_block_levels) is raised first.  */
368 while (ts1 != ts2)
370 gcc_assert (ts1 && ts2);
371 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
372 ts1 = BLOCK_SUPERCONTEXT (ts1);
373 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
374 ts2 = BLOCK_SUPERCONTEXT (ts2);
375 else
377 ts1 = BLOCK_SUPERCONTEXT (ts1);
378 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* COM is the closest common ancestor of S1 and S2.  */
381 com = ts1;
383 /* Close scopes. */
/* Emit BLOCK_END notes before ORIG_INSN for every block from S1 up to,
   but not including, COM.  */
384 s = s1;
385 while (s != com)
387 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
388 NOTE_BLOCK (note) = s;
389 s = BLOCK_SUPERCONTEXT (s);
392 /* Open scopes. */
/* Emit BLOCK_BEG notes for S2's chain up to COM.  Each note is emitted
   before the previous insertion point, so outer blocks end up ahead of
   inner ones in the insn stream.  */
393 s = s2;
394 while (s != com)
396 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
397 NOTE_BLOCK (insn) = s;
398 s = BLOCK_SUPERCONTEXT (s);
402 /* Return lexical scope block insn belong to. */
403 static tree
404 insn_scope (rtx insn)
406 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
407 int min = 0;
408 int loc = INSN_LOCATOR (insn);
410 /* When block_locators_locs was initialized, the pro- and epilogue
411 insns didn't exist yet and can therefore not be found this way.
412 But we know that they belong to the outer most block of the
413 current function.
414 Without this test, the prologue would be put inside the block of
415 the first valid instruction in the function and when that first
416 insn is part of an inlined function then the low_pc of that
417 inlined function is messed up. Likewise for the epilogue and
418 the last valid instruction. */
419 if (loc == prologue_locator || loc == epilogue_locator)
420 return DECL_INITIAL (cfun->decl);
/* No locator table or no locator on this insn: scope is unknown.  */
422 if (!max || !loc)
423 return NULL;
/* Binary search for the greatest recorded locator <= LOC; MIN ends up
   at the entry whose block covers LOC.  */
424 while (1)
426 int pos = (min + max) / 2;
427 int tmp = VARRAY_INT (block_locators_locs, pos);
429 if (tmp <= loc && min != pos)
430 min = pos;
431 else if (tmp > loc && max != pos)
432 max = pos;
433 else
435 min = pos;
436 break;
439 return VARRAY_TREE (block_locators_blocks, min);
442 /* Return line number of the statement specified by the locator. */
444 locator_line (int loc)
446 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
447 int min = 0;
449 if (!max || !loc)
450 return 0;
451 while (1)
453 int pos = (min + max) / 2;
454 int tmp = VARRAY_INT (line_locators_locs, pos);
456 if (tmp <= loc && min != pos)
457 min = pos;
458 else if (tmp > loc && max != pos)
459 max = pos;
460 else
462 min = pos;
463 break;
466 return VARRAY_INT (line_locators_lines, min);
469 /* Return line number of the statement that produced this insn. */
471 insn_line (rtx insn)
473 return locator_line (INSN_LOCATOR (insn));
476 /* Return source file of the statement specified by LOC. */
477 const char *
478 locator_file (int loc)
480 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
481 int min = 0;
483 if (!max || !loc)
484 return NULL;
485 while (1)
487 int pos = (min + max) / 2;
488 int tmp = VARRAY_INT (file_locators_locs, pos);
490 if (tmp <= loc && min != pos)
491 min = pos;
492 else if (tmp > loc && max != pos)
493 max = pos;
494 else
496 min = pos;
497 break;
500 return VARRAY_CHAR_PTR (file_locators_files, min);
503 /* Return source file of the statement that produced this insn. */
504 const char *
505 insn_file (rtx insn)
507 return locator_file (INSN_LOCATOR (insn));
510 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
511 on the scope tree and the newly reordered instructions. */
513 void
514 reemit_insn_block_notes (void)
516 tree cur_block = DECL_INITIAL (cfun->decl);
517 rtx insn, note;
519 insn = get_insns ();
520 if (!active_insn_p (insn))
521 insn = next_active_insn (insn);
522 for (; insn; insn = next_active_insn (insn))
524 tree this_block;
526 /* Avoid putting scope notes between jump table and its label. */
527 if (JUMP_P (insn)
528 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
529 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
530 continue;
532 this_block = insn_scope (insn);
533 /* For sequences compute scope resulting from merging all scopes
534 of instructions nested inside. */
535 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
537 int i;
538 rtx body = PATTERN (insn);
540 this_block = NULL;
541 for (i = 0; i < XVECLEN (body, 0); i++)
542 this_block = choose_inner_scope (this_block,
543 insn_scope (XVECEXP (body, 0, i)));
/* Insns with no recorded scope inherit the current one.  */
545 if (! this_block)
546 continue;
/* Emit the BLOCK_END/BLOCK_BEG notes needed to move from the current
   scope to this insn's scope.  */
548 if (this_block != cur_block)
550 change_scope (insn, cur_block, this_block);
551 cur_block = this_block;
555 /* change_scope emits before the insn, not after. */
/* A temporary note at the end of the stream gives change_scope an
   anchor to close all still-open scopes; it is deleted right after.  */
556 note = emit_note (NOTE_INSN_DELETED);
557 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
558 delete_insn (note);
/* Rebuild the BLOCK tree to match the new note order.  */
560 reorder_blocks ();
563 /* Given a reorder chain, rearrange the code to match. */
565 static void
566 fixup_reorder_chain (void)
568 basic_block bb, prev_bb;
569 int index;
570 rtx insn = NULL;
/* Re-attach the saved function header at the front of the stream and
   leave INSN at its last insn.  */
572 if (cfg_layout_function_header)
574 set_first_insn (cfg_layout_function_header);
575 insn = cfg_layout_function_header;
576 while (NEXT_INSN (insn))
577 insn = NEXT_INSN (insn);
580 /* First do the bulk reordering -- rechain the blocks without regard to
581 the needed changes to jumps and labels. */
583 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
584 bb != 0;
585 bb = bb->rbi->next, index++)
/* Splice in BB's saved header insns (if any) ...  */
587 if (bb->rbi->header)
589 if (insn)
590 NEXT_INSN (insn) = bb->rbi->header;
591 else
592 set_first_insn (bb->rbi->header);
593 PREV_INSN (bb->rbi->header) = insn;
594 insn = bb->rbi->header;
595 while (NEXT_INSN (insn))
596 insn = NEXT_INSN (insn);
/* ... then the block's own insns ...  */
598 if (insn)
599 NEXT_INSN (insn) = BB_HEAD (bb);
600 else
601 set_first_insn (BB_HEAD (bb));
602 PREV_INSN (BB_HEAD (bb)) = insn;
603 insn = BB_END (bb);
/* ... then its saved footer insns.  */
604 if (bb->rbi->footer)
606 NEXT_INSN (insn) = bb->rbi->footer;
607 PREV_INSN (bb->rbi->footer) = insn;
608 while (NEXT_INSN (insn))
609 insn = NEXT_INSN (insn);
613 gcc_assert (index == n_basic_blocks);
/* Finally re-attach the function footer and fix the last-insn pointer.  */
615 NEXT_INSN (insn) = cfg_layout_function_footer;
616 if (cfg_layout_function_footer)
617 PREV_INSN (cfg_layout_function_footer) = insn;
619 while (NEXT_INSN (insn))
620 insn = NEXT_INSN (insn);
622 set_last_insn (insn);
623 #ifdef ENABLE_CHECKING
624 verify_insn_chain ();
625 #endif
626 delete_dead_jumptables ();
628 /* Now add jumps and labels as needed to match the blocks new
629 outgoing edges. */
631 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
633 edge e_fall, e_taken, e;
634 rtx bb_end_insn;
635 basic_block nb;
636 edge_iterator ei;
638 if (EDGE_COUNT (bb->succs) == 0)
639 continue;
641 /* Find the old fallthru edge, and another non-EH edge for
642 a taken jump. */
643 e_taken = e_fall = NULL;
645 FOR_EACH_EDGE (e, ei, bb->succs)
646 if (e->flags & EDGE_FALLTHRU)
647 e_fall = e;
648 else if (! (e->flags & EDGE_EH))
649 e_taken = e;
651 bb_end_insn = BB_END (bb);
652 if (JUMP_P (bb_end_insn))
654 if (any_condjump_p (bb_end_insn))
656 /* If the old fallthru is still next, nothing to do. */
657 if (bb->rbi->next == e_fall->dest
658 || e_fall->dest == EXIT_BLOCK_PTR)
659 continue;
661 /* The degenerated case of conditional jump jumping to the next
662 instruction can happen on target having jumps with side
663 effects.
665 Create temporarily the duplicated edge representing branch.
666 It will get unidentified by force_nonfallthru_and_redirect
667 that would otherwise get confused by fallthru edge not pointing
668 to the next basic block. */
669 if (!e_taken)
671 rtx note;
672 edge e_fake;
673 bool redirected;
675 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
677 redirected = redirect_jump (BB_END (bb),
678 block_label (bb), 0);
679 gcc_assert (redirected);
/* Split the branch probability from the note between the fake branch
   edge and the fallthru edge.  */
681 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
682 if (note)
684 int prob = INTVAL (XEXP (note, 0));
686 e_fake->probability = prob;
687 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
/* NOTE(review): this subtracts e_fall->probability from itself,
   always yielding 0; e_fall->probability -= e_fake->probability
   looks like what was intended — confirm against upstream.  */
688 e_fall->probability -= e_fall->probability;
689 e_fall->count -= e_fake->count;
690 if (e_fall->probability < 0)
691 e_fall->probability = 0;
692 if (e_fall->count < 0)
693 e_fall->count = 0;
696 /* There is one special case: if *neither* block is next,
697 such as happens at the very end of a function, then we'll
698 need to add a new unconditional jump. Choose the taken
699 edge based on known or assumed probability. */
700 else if (bb->rbi->next != e_taken->dest)
702 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
704 if (note
705 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
706 && invert_jump (bb_end_insn,
707 (e_fall->dest == EXIT_BLOCK_PTR
708 ? NULL_RTX
709 : label_for_bb (e_fall->dest)), 0))
711 e_fall->flags &= ~EDGE_FALLTHRU;
712 #ifdef ENABLE_CHECKING
713 gcc_assert (could_fall_through
714 (e_taken->src, e_taken->dest));
715 #endif
716 e_taken->flags |= EDGE_FALLTHRU;
717 update_br_prob_note (bb);
/* Swap: the inverted jump makes the old taken edge the fallthru.  */
718 e = e_fall, e_fall = e_taken, e_taken = e;
722 /* If the "jumping" edge is a crossing edge, and the fall
723 through edge is non-crossing, leave things as they are. */
724 else if ((e_taken->flags & EDGE_CROSSING)
725 && !(e_fall->flags & EDGE_CROSSING))
726 continue;
728 /* Otherwise we can try to invert the jump. This will
729 basically never fail, however, keep up the pretense. */
730 else if (invert_jump (bb_end_insn,
731 (e_fall->dest == EXIT_BLOCK_PTR
732 ? NULL_RTX
733 : label_for_bb (e_fall->dest)), 0))
735 e_fall->flags &= ~EDGE_FALLTHRU;
736 #ifdef ENABLE_CHECKING
737 gcc_assert (could_fall_through
738 (e_taken->src, e_taken->dest));
739 #endif
740 e_taken->flags |= EDGE_FALLTHRU;
741 update_br_prob_note (bb);
742 continue;
745 else
747 /* Otherwise we have some return, switch or computed
748 jump. In the 99% case, there should not have been a
749 fallthru edge. */
750 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
751 continue;
754 else
756 /* No fallthru implies a noreturn function with EH edges, or
757 something similarly bizarre. In any case, we don't need to
758 do anything. */
759 if (! e_fall)
760 continue;
762 /* If the fallthru block is still next, nothing to do. */
763 if (bb->rbi->next == e_fall->dest)
764 continue;
766 /* A fallthru to exit block. */
767 if (e_fall->dest == EXIT_BLOCK_PTR)
768 continue;
771 /* We got here if we need to add a new jump insn. */
772 nb = force_nonfallthru (e_fall);
773 if (nb)
775 initialize_bb_rbi (nb);
776 nb->rbi->visited = 1;
777 nb->rbi->next = bb->rbi->next;
778 bb->rbi->next = nb;
779 /* Don't process this new block. */
780 bb = nb;
782 /* Make sure new bb is tagged for correct section (same as
783 fall-thru source, since you cannot fall through across
784 section boundaries). */
785 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
786 if (flag_reorder_blocks_and_partition
787 && targetm.have_named_sections)
789 if (BB_PARTITION (single_pred (bb)) == BB_COLD_PARTITION)
/* The new block landed in the cold partition: mark it with an
   unlikely-executed-code note placed before the first real insn.  */
791 rtx new_note;
792 rtx note = BB_HEAD (e_fall->src);
794 while (!INSN_P (note)
795 && note != BB_END (e_fall->src))
796 note = NEXT_INSN (note);
798 new_note = emit_note_before
799 (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
800 note);
801 NOTE_BASIC_BLOCK (new_note) = bb;
/* An unconditional jump crossing partitions must carry a
   REG_CROSSING_JUMP note.  */
803 if (JUMP_P (BB_END (bb))
804 && !any_condjump_p (BB_END (bb))
805 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
806 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
807 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
812 /* Put basic_block_info in the new order. */
814 if (dump_file)
816 fprintf (dump_file, "Reordered sequence:\n");
817 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
819 bb = bb->rbi->next, index++)
821 fprintf (dump_file, " %i ", index);
822 if (bb->rbi->original)
823 fprintf (dump_file, "duplicate of %i ",
824 bb->rbi->original->index);
825 else if (forwarder_block_p (bb)
826 && !LABEL_P (BB_HEAD (bb)))
827 fprintf (dump_file, "compensation ");
828 else
829 fprintf (dump_file, "bb %i ", bb->index);
830 fprintf (dump_file, " [%i]\n", bb->frequency);
/* Renumber the blocks in chain order and relink prev_bb/next_bb.  */
834 prev_bb = ENTRY_BLOCK_PTR;
835 bb = ENTRY_BLOCK_PTR->next_bb;
836 index = 0;
838 for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
840 bb->index = index;
841 BASIC_BLOCK (index) = bb;
843 update_unlikely_executed_notes (bb);
845 bb->prev_bb = prev_bb;
846 prev_bb->next_bb = bb;
848 prev_bb->next_bb = EXIT_BLOCK_PTR;
849 EXIT_BLOCK_PTR->prev_bb = prev_bb;
851 /* Annoying special case - jump around dead jumptables left in the code. */
852 FOR_EACH_BB (bb)
854 edge e;
855 edge_iterator ei;
857 FOR_EACH_EDGE (e, ei, bb->succs)
858 if (e->flags & EDGE_FALLTHRU)
859 break;
861 if (e && !can_fallthru (e->src, e->dest))
862 force_nonfallthru (e);
866 /* Update the basic block number information in any
867 NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block. */
869 static void
870 update_unlikely_executed_notes (basic_block bb)
872 rtx cur_insn;
874 for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
875 cur_insn = NEXT_INSN (cur_insn))
876 if (NOTE_P (cur_insn)
877 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
878 NOTE_BASIC_BLOCK (cur_insn) = bb;
881 /* Perform sanity checks on the insn chain.
882 1. Check that next/prev pointers are consistent in both the forward and
883 reverse direction.
884 2. Count insns in chain, going both directions, and check if equal.
885 3. Check that get_last_insn () returns the actual end of chain. */
887 void
888 verify_insn_chain (void)
890 rtx x, prevx, nextx;
891 int insn_cnt1, insn_cnt2;
/* Forward pass: every insn's PREV must be the insn we just left.  */
893 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
894 x != 0;
895 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
896 gcc_assert (PREV_INSN (x) == prevx);
898 gcc_assert (prevx == get_last_insn ());
/* Backward pass: every insn's NEXT must be the insn we just left.  */
900 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
901 x != 0;
902 nextx = x, insn_cnt2++, x = PREV_INSN (x))
903 gcc_assert (NEXT_INSN (x) == nextx);
/* Both walks must have seen the same number of insns.  */
905 gcc_assert (insn_cnt1 == insn_cnt2);
908 /* If we have assembler epilogues, the block falling through to exit must
909 be the last one in the reordered chain when we reach final. Ensure
910 that this condition is met. */
911 static void
912 fixup_fallthru_exit_predecessor (void)
914 edge e;
915 edge_iterator ei;
916 basic_block bb = NULL;
918 /* This transformation is not valid before reload, because we might
919 separate a call from the instruction that copies the return
920 value. */
921 gcc_assert (reload_completed);
/* Find the (at most one) block that falls through into the exit block.  */
923 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
924 if (e->flags & EDGE_FALLTHRU)
925 bb = e->src;
/* Only act when BB is not already last in the reorder chain.  */
927 if (bb && bb->rbi->next)
929 basic_block c = ENTRY_BLOCK_PTR->next_bb;
931 /* If the very first block is the one with the fall-through exit
932 edge, we have to split that block. */
933 if (c == bb)
935 bb = split_block (bb, NULL)->dest;
936 initialize_bb_rbi (bb);
937 bb->rbi->next = c->rbi->next;
938 c->rbi->next = bb;
/* The footer stays with the part that precedes the exit edge.  */
939 bb->rbi->footer = c->rbi->footer;
940 c->rbi->footer = NULL;
/* Unlink BB from its current position in the chain ...  */
943 while (c->rbi->next != bb)
944 c = c->rbi->next;
946 c->rbi->next = bb->rbi->next;
/* ... and re-append it at the very end.  */
947 while (c->rbi->next)
948 c = c->rbi->next;
950 c->rbi->next = bb;
951 bb->rbi->next = NULL;
955 /* Return true in case it is possible to duplicate the basic block BB. */
957 /* We do not want to declare the function in a header file, since it should
958 only be used through the cfghooks interface, and we do not want to move
959 it to cfgrtl.c since it would require also moving quite a lot of related
960 code. */
961 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
963 bool
964 cfg_layout_can_duplicate_bb_p (basic_block bb)
966 /* Do not attempt to duplicate tablejumps, as we need to unshare
967 the dispatch table. This is difficult to do, as the instructions
968 computing jump destination may be hoisted outside the basic block. */
969 if (tablejump_p (BB_END (bb), NULL, NULL))
970 return false;
972 /* Do not duplicate blocks containing insns that can't be copied. */
/* Ask the target hook, when defined, about every real insn in BB.  */
973 if (targetm.cannot_copy_insn_p)
975 rtx insn = BB_HEAD (bb);
976 while (1)
978 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
979 return false;
980 if (insn == BB_END (bb))
981 break;
982 insn = NEXT_INSN (insn);
986 return true;
/* Create a copy of the insns FROM..TO (inclusive) at the end of the
   insn stream and return the first insn of the copy.  The copied chain
   is expected to be re-linked into place later.  */
990 duplicate_insn_chain (rtx from, rtx to)
992 rtx insn, last;
994 /* Avoid updating of boundaries of previous basic block. The
995 note will get removed from insn stream in fixup. */
996 last = emit_note (NOTE_INSN_DELETED);
998 /* Create copy at the end of INSN chain. The chain will
999 be reordered later. */
1000 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1002 switch (GET_CODE (insn))
1004 case INSN:
1005 case CALL_INSN:
1006 case JUMP_INSN:
1007 /* Avoid copying of dispatch tables. We never duplicate
1008 tablejumps, so this can hit only in case the table got
1009 moved far from original jump. */
1010 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1011 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1012 break;
1013 emit_copy_of_insn_after (insn, get_last_insn ());
1014 break;
/* Labels are not copied; the copy gets fresh labels elsewhere.  */
1016 case CODE_LABEL:
1017 break;
1019 case BARRIER:
1020 emit_barrier ();
1021 break;
1023 case NOTE:
1024 switch (NOTE_LINE_NUMBER (insn))
1026 /* In case prologue is empty and function contain label
1027 in first BB, we may want to copy the block. */
1028 case NOTE_INSN_PROLOGUE_END:
1030 case NOTE_INSN_LOOP_BEG:
1031 case NOTE_INSN_LOOP_END:
1032 /* Strip down the loop notes - we don't really want to keep
1033 them consistent in loop copies. */
1034 case NOTE_INSN_DELETED:
1035 case NOTE_INSN_DELETED_LABEL:
1036 /* No problem to strip these. */
1037 case NOTE_INSN_EPILOGUE_BEG:
1038 case NOTE_INSN_FUNCTION_END:
1039 /* Debug code expect these notes to exist just once.
1040 Keep them in the master copy.
1041 ??? It probably makes more sense to duplicate them for each
1042 epilogue copy. */
1043 case NOTE_INSN_FUNCTION_BEG:
1044 /* There is always just single entry to function. */
1045 case NOTE_INSN_BASIC_BLOCK:
1046 break;
1048 case NOTE_INSN_REPEATED_LINE_NUMBER:
1049 case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
1050 emit_note_copy (insn);
1051 break;
1053 default:
1054 /* All other notes should have already been eliminated.  */
1056 gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
1058 /* It is possible that no_line_number is set and the note
1059 won't be emitted. */
1060 emit_note_copy (insn);
1062 break;
1063 default:
1064 gcc_unreachable ();
/* Drop the anchor note; the copy starts right after it.  */
1067 insn = NEXT_INSN (last);
1068 delete_insn (last);
1069 return insn;
1071 /* Create a duplicate of the basic block BB. */
1073 /* We do not want to declare the function in a header file, since it should
1074 only be used through the cfghooks interface, and we do not want to move
1075 it to cfgrtl.c since it would require also moving quite a lot of related
1076 code. */
1077 extern basic_block cfg_layout_duplicate_bb (basic_block);
1079 basic_block
1080 cfg_layout_duplicate_bb (basic_block bb)
1082 rtx insn;
1083 basic_block new_bb;
/* Copy BB's insns to the end of the stream and wrap them in a new
   block placed just before the exit block.  */
1085 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1086 new_bb = create_basic_block (insn,
1087 insn ? get_last_insn () : NULL,
1088 EXIT_BLOCK_PTR->prev_bb);
1090 BB_COPY_PARTITION (new_bb, bb);
/* Duplicate the detached header insn chain, if any.  */
1091 if (bb->rbi->header)
1093 insn = bb->rbi->header;
1094 while (NEXT_INSN (insn))
1095 insn = NEXT_INSN (insn);
1096 insn = duplicate_insn_chain (bb->rbi->header, insn);
1097 if (insn)
1098 new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
/* Likewise for the detached footer insn chain.  */
1101 if (bb->rbi->footer)
1103 insn = bb->rbi->footer;
1104 while (NEXT_INSN (insn))
1105 insn = NEXT_INSN (insn);
1106 insn = duplicate_insn_chain (bb->rbi->footer, insn);
1107 if (insn)
1108 new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
/* Copy the register liveness sets when they are present.  */
1111 if (bb->global_live_at_start)
1113 new_bb->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1114 new_bb->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1115 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1116 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1119 return new_bb;
1122 /* Main entry point to this module - initialize the datastructures for
1123 CFG layout changes. It keeps LOOPS up-to-date if not null.
1125 FLAGS is a set of additional flags to pass to cleanup_cfg(). It should
1126 include CLEANUP_UPDATE_LIFE if liveness information must be kept up
1127 to date. */
1129 void
1130 cfg_layout_initialize (unsigned int flags)
1132 basic_block bb;
1134 /* Our algorithm depends on fact that there are no dead jumptables
1135 around the code. */
/* Allocate and attach per-block reorder info (rbi) to every block,
   including the entry block.  */
1136 alloc_rbi_pool ();
1138 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1139 initialize_bb_rbi (bb);
/* Switch the cfghooks to the cfglayout variants before detaching the
   inter-block insns.  */
1141 cfg_layout_rtl_register_cfg_hooks ();
1143 record_effective_endpoints ();
1145 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1148 /* Splits superblocks. */
1149 void
1150 break_superblocks (void)
1152 sbitmap superblocks;
1153 bool need = false;
1154 basic_block bb;
1156 superblocks = sbitmap_alloc (last_basic_block);
1157 sbitmap_zero (superblocks);
1159 FOR_EACH_BB (bb)
1160 if (bb->flags & BB_SUPERBLOCK)
1162 bb->flags &= ~BB_SUPERBLOCK;
1163 SET_BIT (superblocks, bb->index);
1164 need = true;
1167 if (need)
1169 rebuild_jump_labels (get_insns ());
1170 find_many_sub_basic_blocks (superblocks);
1173 free (superblocks);
1176 /* Finalize the changes: reorder insn list according to the sequence, enter
1177 compensation code, rebuild scope forest. */
1179 void
1180 cfg_layout_finalize (void)
1182 basic_block bb;
1184 #ifdef ENABLE_CHECKING
1185 verify_flow_info ();
1186 #endif
/* Restore the ordinary RTL cfghooks before rewriting the insn chain.  */
1187 rtl_register_cfg_hooks ();
/* Without a target epilogue, the fallthru predecessor of the exit
   block must end up last in the chain (see
   fixup_fallthru_exit_predecessor).  */
1188 if (reload_completed
1189 #ifdef HAVE_epilogue
1190 && !HAVE_epilogue
1191 #endif
1193 fixup_fallthru_exit_predecessor ();
1194 fixup_reorder_chain ();
1196 #ifdef ENABLE_CHECKING
1197 verify_insn_chain ();
1198 #endif
/* Release the per-block reorder info now that the chain is rewritten.  */
1200 free_rbi_pool ();
1201 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1202 bb->rbi = NULL;
1204 break_superblocks ();
1206 #ifdef ENABLE_CHECKING
1207 verify_flow_info ();
1208 #endif
1211 /* Checks whether all N blocks in BBS array can be copied. */
1212 bool
1213 can_copy_bbs_p (basic_block *bbs, unsigned n)
1215 unsigned i;
1216 edge e;
1217 int ret = true;
/* Temporarily mark the candidate blocks so edges into the set can be
   recognized below; the marks are cleared again at END.  */
1219 for (i = 0; i < n; i++)
1220 bbs[i]->rbi->duplicated = 1;
1222 for (i = 0; i < n; i++)
1224 /* In case we should redirect abnormal edge during duplication, fail. */
1225 edge_iterator ei;
1226 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1227 if ((e->flags & EDGE_ABNORMAL)
1228 && e->dest->rbi->duplicated)
1230 ret = false;
1231 goto end;
/* Each individual block must also be duplicable on its own.  */
1234 if (!can_duplicate_block_p (bbs[i]))
1236 ret = false;
1237 break;
1241 end:
1242 for (i = 0; i < n; i++)
1243 bbs[i]->rbi->duplicated = 0;
1245 return ret;
1248 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1249 are placed into array NEW_BBS in the same order. Edges from basic blocks
1250 in BBS are also duplicated and copies of those of them
1251 that lead into BBS are redirected to appropriate newly created block. The
1252 function assigns bbs into loops (copy of basic block bb is assigned to
1253 bb->loop_father->copy loop, so this must be set up correctly in advance)
1254 and updates dominators locally (LOOPS structure that contains the information
1255 about dominators is passed to enable this).
1257 BASE is the superloop to that basic block belongs; if its header or latch
1258 is copied, we do not set the new blocks as header or latch.
1260 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1261 also in the same order. */
1263 void
1264 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1265 edge *edges, unsigned n_edges, edge *new_edges,
1266 struct loop *base)
1268 unsigned i, j;
1269 basic_block bb, new_bb, dom_bb;
1270 edge e;
1272 /* Duplicate bbs, update dominators, assign bbs to loops. */
/* Pass 1: duplicate each block, mark the original as duplicated, and
   place the copy in the corresponding loop.  */
1273 for (i = 0; i < n; i++)
1275 /* Duplicate. */
1276 bb = bbs[i];
1277 new_bb = new_bbs[i] = duplicate_block (bb, NULL);
1278 bb->rbi->duplicated = 1;
1279 /* Add to loop. */
1280 add_bb_to_loop (new_bb, bb->loop_father->copy);
1281 /* Possibly set header. */
1282 if (bb->loop_father->header == bb && bb->loop_father != base)
1283 new_bb->loop_father->header = new_bb;
1284 /* Or latch. */
1285 if (bb->loop_father->latch == bb && bb->loop_father != base)
1286 new_bb->loop_father->latch = new_bb;
1289 /* Set dominators. */
/* Pass 2: a copy's immediate dominator is the copy of the original's
   dominator when that dominator was itself copied.  */
1290 for (i = 0; i < n; i++)
1292 bb = bbs[i];
1293 new_bb = new_bbs[i];
1295 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1296 if (dom_bb->rbi->duplicated)
1298 dom_bb = dom_bb->rbi->copy;
1299 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1303 /* Redirect edges. */
/* Pass 3: record the copies of the requested EDGES and redirect edges
   that lead back into the duplicated set to the corresponding copies.  */
1304 for (j = 0; j < n_edges; j++)
1305 new_edges[j] = NULL;
1306 for (i = 0; i < n; i++)
1308 edge_iterator ei;
1309 new_bb = new_bbs[i];
1310 bb = bbs[i];
1312 FOR_EACH_EDGE (e, ei, new_bb->succs)
1314 for (j = 0; j < n_edges; j++)
1315 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1316 new_edges[j] = e;
1318 if (!e->dest->rbi->duplicated)
1319 continue;
1320 redirect_edge_and_branch_force (e, e->dest->rbi->copy);
1324 /* Clear information about duplicates. */
1325 for (i = 0; i < n; i++)
1326 bbs[i]->rbi->duplicated = 0;
1329 #include "gt-cfglayout.h"