/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "vecprim.h"
/* Holds the interesting leading (header) and trailing (footer) notes
   for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);

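/* Unlink the insns from FIRST to LAST inclusive out of the insn chain,
   reconnecting the neighbors around the removed span, and return FIRST.
   The unlinked sub-chain keeps its internal links but gets NULL end
   pointers.  */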
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_P (NEXT_INSN (insn))
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Here the barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->il.rtl->header = unlink_insn_chain (next_insn,
                                                PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATOR into scope blocks,
   line numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks array contains the scope block that is used for all
   insn locators greater than the corresponding block_locators_locs value and
   smaller than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *line_locators_locs;
static VEC(int,heap) *line_locators_lines;
static VEC(int,heap) *file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;
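
/* For illustration (the values are hypothetical): if line_locators_locs is
   {1, 5, 9} and line_locators_lines is {10, 12, 20}, then locators 1..4 map
   to line 10, locators 5..8 to line 12, and locators >= 9 to line 20.  The
   lookup routines below binary-search the _locs array and return the
   property stored at the matching index.  */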

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

unsigned int
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  line_locators_locs = VEC_alloc (int, heap, 32);
  line_locators_lines = VEC_alloc (int, heap, 32);
  file_locators_locs = VEC_alloc (int, heap, 32);
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
        {
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
          if (NOTE_LINE_NUMBER (insn) > 0)
            {
              expanded_location xloc;
              NOTE_EXPANDED_LOCATION (xloc, insn);
              line_number = xloc.line;
              file_name = xloc.file;
              delete_insn (insn);
            }
        }
      else
        active = (active_insn_p (insn)
                  && GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      if (active
          || !next
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VEC_safe_push (int, heap, block_locators_locs, loc);
              VEC_safe_push (tree, gc, block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VEC_safe_push (int, heap, line_locators_locs, loc);
              VEC_safe_push (int, heap, line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VEC_safe_push (int, heap, file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
              last_file_name = file_name;
            }
          if (!prologue_locator && file_name)
            prologue_locator = loc;
          if (!next)
            epilogue_locator = loc;
          if (active)
            INSN_LOCATOR (insn) = loc;
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
  return 0;
}

struct tree_opt_pass pass_insn_locators_initialize =
{
  "locators",                           /* name */
  NULL,                                 /* gate */
  insn_locators_initialize,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the scope resulting from combining scopes S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */
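/* For example (hypothetical block tree): if S1 sits two levels below the
   common ancestor of S1 and S2, and S2 sits one level below it, this emits
   two NOTE_INSN_BLOCK_END notes (closing S1 and its parent) followed by one
   NOTE_INSN_BLOCK_BEG note (opening S2), all before ORIG_INSN.  */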
static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator LOC.  */
int
locator_line (int loc)
{
  int max = VEC_length (int, line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (int, line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VEC_length (int, file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_P (insn)
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */
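/* The desired order is recorded by the cfglayout clients through the bb->aux
   pointers: each basic block's aux field points to the block that should
   follow it in the final insn stream (NULL terminates the chain).  The loops
   below walk that chain, splicing headers, bodies and footers back into a
   single insn list and fixing up jumps and labels afterwards.  */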
static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
       bb != 0;
       bb = bb->aux, index++)
    {
      if (bb->il.rtl->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->il.rtl->header;
          else
            set_first_insn (bb->il.rtl->header);
          PREV_INSN (bb->il.rtl->header) = insn;
          insn = bb->il.rtl->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
        {
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerate case of a conditional jump jumping to the next
                 instruction can happen for jumps with side effects.  We need
                 to construct a forwarder block and this will be done just
                 fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          nb->il.rtl->visited = 1;
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;

          /* Make sure new bb is tagged for correct section (same as
             fall-through source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, single_pred (bb));
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && JUMP_P (BB_END (bb))
              && !any_condjump_p (BB_END (bb))
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
            REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
              (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
        }
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
           bb;
           bb = bb->aux, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (get_bb_original (bb))
            fprintf (dump_file, "duplicate of %i ",
                     get_bb_original (bb)->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = NUM_FIXED_BLOCKS;

  for (; bb; prev_bb = bb, bb = bb->aux, index++)
    {
      bb->index = index;
      SET_BASIC_BLOCK (index, bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          bb->il.rtl->footer = c->il.rtl->footer;
          c->il.rtl->footer = NULL;
        }

      while (c->aux != bb)
        c = c->aux;

      c->aux = bb->aux;
      while (c->aux)
        c = c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

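/* Create a copy of the insns from FROM to TO inclusive at the end of the
   current insn chain and return the first insn of the copy.  Dispatch
   tables and most notes are deliberately not copied; see the cases below.  */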
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_REPEATED_LINE_NUMBER:
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
                    bb->il.rtl->global_live_at_start);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
                    bb->il.rtl->global_live_at_end);
    }

  return new_bb;
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */
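/* A typical cfglayout session (a sketch, not taken from any particular pass):

     cfg_layout_initialize (0);
     ...  reorder blocks by rewriting the bb->aux chain  ...
     cfg_layout_finalize ();

   cfg_layout_finalize below re-linearizes the insn stream to match the aux
   chain and clears the per-block header, footer and visited fields.  */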
void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->il.rtl->header = bb->il.rtl->footer = NULL;
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
    }

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  free_original_copy_tables ();
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if ((e->flags & EDGE_ABNORMAL)
            && (e->dest->flags & BB_DUPLICATED))
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated, and those copies that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   blocks into loops (the copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (the LOOPS structure that contains the
   information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned num_edges, edge *new_edges,
          struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
        {
          dom_bb = get_bb_copy (dom_bb);
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
        {
          for (j = 0; j < num_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!(e->dest->flags & BB_DUPLICATED))
            continue;
          redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}

#include "gt-cfglayout.h"