/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting leading and trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
static void update_unlikely_executed_notes (basic_block);

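/* Detach the insns FIRST through LAST, inclusive, from the current
   function's insn chain, repairing the links of their neighbours (and the
   first/last insn pointers when needed), and return FIRST so that the
   detached sub-chain can be re-linked elsewhere.  */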
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_P (NEXT_INSN (insn))
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Here the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */
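/* Insns found before BB_HEAD are stashed in bb->rbi->header and insns
   found after BB_END in bb->rbi->footer, so that they accompany the block
   when the chain is later reordered.  */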
static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
      unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATORs into scope
   blocks, line numbers and files.  In order to be GGC friendly we need to use
   separate varrays.  This also slightly improves the memory locality in the
   binary search.  The _locs arrays contain the locators where the given
   property changes.  The block_locators_blocks array contains the scope block
   that is used for all insn locators greater than the corresponding
   block_locators_locs value and smaller than the following one.  Similarly
   for the other properties.  */
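/* For instance, if block_locators_locs holds {1, 5} and
   block_locators_blocks holds {A, B}, then locators 1 through 4 map to
   scope block A and locators 5 and above map to scope block B; insn_scope
   below recovers the block by binary search over block_locators_locs.  */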
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;

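/* Locators assigned to the function prologue and epilogue insns;
   insn_scope maps these to the function's outermost block.  */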
int prologue_locator;
int epilogue_locator;

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by a representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
        {
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
          if (NOTE_LINE_NUMBER (insn) > 0)
            {
              expanded_location xloc;
              NOTE_EXPANDED_LOCATION (xloc, insn);
              line_number = xloc.line;
              file_name = xloc.file;
            }
        }
      else
        active = (active_insn_p (insn)
                  && GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      if (active
          || !next
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
              last_file_name = file_name;
            }
          if (!prologue_locator && file_name)
            prologue_locator = loc;
          if (!next)
            epilogue_locator = loc;
          if (active)
            INSN_LOCATOR (insn) = loc;
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from the combination of S1 and S2.  */
tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function, and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */
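/* The desired order is described by the bb->rbi->next links; the insn
   stream is relinked to follow that order, and jumps or labels are then
   added or redirected so that every block still transfers control to the
   correct successors.  */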
static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();
  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      basic_block old_bb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets having jumps with
                 side effects.

                 Create temporarily the duplicated edge representing the
                 branch.  It will get unidentified by
                 force_nonfallthru_and_redirect, which would otherwise get
                 confused by a fallthru edge not pointing to the next basic
                 block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;
                  bool redirected;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  redirected = redirect_jump (BB_END (bb),
                                              block_label (bb), 0);
                  gcc_assert (redirected);

                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fall->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else
            {
#ifndef CASE_DROPS_THROUGH
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
#else
              if (returnjump_p (bb_end_insn) || !e_fall)
                continue;

              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          initialize_bb_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          old_bb = bb;
          bb = nb;

          /* Make sure the new bb is tagged for the correct section (same as
             the fall-through source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, bb->pred->src);
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections)
            {
              if (BB_PARTITION (bb->pred->src) == BB_COLD_PARTITION)
                {
                  rtx new_note;
                  rtx note = BB_HEAD (e_fall->src);

                  while (!INSN_P (note)
                         && note != BB_END (e_fall->src))
                    note = NEXT_INSN (note);

                  new_note = emit_note_before
                    (NOTE_INSN_UNLIKELY_EXECUTED_CODE, note);
                  NOTE_BASIC_BLOCK (new_note) = bb;
                }
              if (JUMP_P (BB_END (bb))
                  && !any_condjump_p (BB_END (bb))
                  && (bb->succ->flags & EDGE_CROSSING))
                REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
                  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
            }
        }
    }
  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
           bb;
           bb = bb->rbi->next, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      update_unlikely_executed_notes (bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
        continue;
      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Update the basic block number information in any
   NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block.  */

static void
update_unlikely_executed_notes (basic_block bb)
{
  rtx cur_insn;

  for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
       cur_insn = NEXT_INSN (cur_insn))
    if (NOTE_P (cur_insn)
        && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
      NOTE_BASIC_BLOCK (cur_insn) = bb;
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          initialize_bb_rbi (bb);
          bb->rbi->next = c->rbi->next;
          c->rbi->next = bb;
          bb->rbi->footer = c->rbi->footer;
          c->rbi->footer = NULL;
        }

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}

/* Return true if it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

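/* Emit a copy of the insns FROM up to and including TO at the end of the
   current insn chain, and return the first insn of the copy.  Dispatch
   tables and most notes are deliberately not copied.  */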
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_REPEATED_LINE_NUMBER:
            case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  return new_bb;
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg ().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */
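/* A typical client calls cfg_layout_initialize, rearranges the chain of
   bb->rbi->next pointers (possibly duplicating blocks), and then calls
   cfg_layout_finalize to commit the new order to the insn stream.  */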
void
cfg_layout_initialize (unsigned int flags)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  alloc_rbi_pool ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    initialize_bb_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder the insn list according to the sequence,
   enter compensation code, rebuild the scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_rbi_pool ();
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      for (e = bbs[i]->succ; e; e = e->succ_next)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated, and copies of those of them that lead into BBS
   are redirected to the appropriate newly created block.  The function
   assigns bbs into loops (a copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (the LOOPS structure that contains the
   information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      new_bb = new_bbs[i];
      bb = bbs[i];

      for (e = new_bb->succ; e; e = e->succ_next)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}

#include "gt-cfglayout.h"