gcc/cfglayout.c

/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;
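
/* Pool from which the reorder_block_def structures attached to each
   basic block (bb->rbi) are allocated.  */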
alloc_pool cfg_layout_pool;

/* Holds the interesting leading (header) and trailing (footer) notes
   for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static rtx duplicate_insn_chain (rtx, rtx);
static tree insn_scope (rtx);
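
/* Unlink the insn chain FIRST..LAST from the containing insn stream,
   fixing up the neighbouring insns and the global first/last insn
   pointers, and return FIRST.  */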
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */
static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Here the barrier belongs to the jump_insn, but the note does not.
     This can be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  if (!insn)
    abort ();  /* No basic blocks at all?  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
      unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of an INSN_LOCATOR into scope
   blocks, line numbers and file names.  In order to be GGC friendly we need
   to use separate varrays.  This also slightly improves the memory locality
   in the binary search.  The _locs arrays contain the locators where the
   given property changes.  The block_locators_blocks array contains the
   scope block that is used for all insn locators greater than the
   corresponding block_locators_locs value and smaller than the following
   one.  Similarly for the other properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
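
/* Locators for the function prologue and epilogue; insn_scope maps them
   to the outermost block of the function.  */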
int prologue_locator;
int epilogue_locator;

/* During RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them with a representation
   using INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
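
      /* Record changes of lexical block, line number or file name at
         each real insn (dispatch tables excluded), at the last insn of
         the chain, and, while the prologue locator is still unset, at
         the first insn for which a source file is known.  */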
      if ((active_insn_p (insn)
           && GET_CODE (PATTERN (insn)) != ADDR_VEC
           && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
          || !NEXT_INSN (insn)
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
              last_file_name = file_name;
            }
        }

      if (!prologue_locator && file_name)
        prologue_locator = loc;
      if (!NEXT_INSN (insn))
        epilogue_locator = loc;
      if (active_insn_p (insn))
        INSN_LOCATOR (insn) = loc;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              if (block && TREE_CODE (block) == FUNCTION_DECL)
                block = 0;
              delete_insn (insn);
              break;
            default:
              if (NOTE_LINE_NUMBER (insn) > 0)
                {
                  line_number = NOTE_LINE_NUMBER (insn);
                  file_name = (char *) NOTE_SOURCE_FILE (insn);
                }
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return the innermost scope resulting from the combination of S1 and S2.  */

tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block to which INSN belongs.  */

static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the prologue and epilogue
     insns didn't exist yet and therefore cannot be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
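
  /* Binary search for the last table entry with a locator not greater
     than LOC.  */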
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */

int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */

int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */

const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */

const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
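  /* Delete any jump tables that have become dead.  */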
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e ; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || (!bb->rbi->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets whose jumps have
                 side effects.

                 Create temporarily the duplicated edge representing the
                 branch.  It will be taken care of by
                 force_nonfallthru_and_redirect, which would otherwise get
                 confused by a fallthru edge not pointing to the next basic
                 block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  if (!redirect_jump (BB_END (bb), block_label (bb), 0))
                    abort ();
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fake->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          cfg_layout_initialize_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
           bb;
           bb = bb->rbi->next, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb)
                   && GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
        continue;
      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}

/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */

static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating fallthru block to exit would require adding a jump
     and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing the jump destination may be hoisted outside the basic
     block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}
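
/* Emit a copy of the insn chain FROM..TO (inclusive) at the end of the
   current insn stream and return the first insn of the copy.  Dispatch
   tables, labels and most notes are not duplicated.  */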
static rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose in duplicating the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated while BB
                 reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
              emit_note_copy (insn);
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB and redirect edge E into it.
   If E is not specified, BB is just copied, but updating the frequencies
   etc. is left to the caller.  */
basic_block
cfg_layout_duplicate_bb (basic_block bb, edge e)
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
         of another block (which therefore are known to be disjoint), there
         is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (e && bb->count)
        {
          /* Take care for overflows!  */
          n->count = s->count * (new_count * 10000 / bb->count) / 10000;
          s->count -= n->count;
        }
      else
        n->count = s->count;
      n->aux = s->aux;
    }

  if (e)
    {
      new_bb->count = new_count;
      bb->count -= new_count;

      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      redirect_edge_and_branch_force (e, new_bb);

      if (bb->count < 0)
        bb->count = 0;
      if (bb->frequency < 0)
        bb->frequency = 0;
    }
  else
    {
      new_bb->count = bb->count;
      new_bb->frequency = bb->frequency;
    }

  new_bb->rbi->original = bb;
  bb->rbi->copy = new_bb;

  return new_bb;
}
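
/* Allocate and clear the reorder_block_def (rbi) structure of basic
   block BB.  */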
void
cfg_layout_initialize_rbi (basic_block bb)
{
  if (bb->rbi)
    abort ();
  bb->rbi = pool_alloc (cfg_layout_pool);
  memset (bb->rbi, 0, sizeof (struct reorder_block_def));
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  */

void
cfg_layout_initialize (void)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  cfg_layout_pool =
    create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
                       n_basic_blocks + 2);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    cfg_layout_initialize_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_alloc_pool (cfg_layout_pool);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Check whether all N blocks in the BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  bool ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* If we would have to redirect an abnormal edge during the
         duplication, fail.  */
      for (e = bbs[i]->succ; e; e = e->succ_next)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   blocks to loops (a copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally.

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      new_bb = new_bbs[i];
      bb = bbs[i];

      for (e = new_bb->succ; e; e = e->succ_next)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}

#include "gt-cfglayout.h"