gcc/cfglayout.c
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;
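/* Pool from which the reorder block info (rbi) structures attached to
   each basic block are allocated.  */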
alloc_pool cfg_layout_pool;

/* Holds the interesting leading and trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static rtx duplicate_insn_chain (rtx, rtx);
static tree insn_scope (rtx);
static void update_unlikely_executed_notes (basic_block);
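/* Unlink the insns FIRST through LAST, inclusive, from the insn stream:
   reconnect their former neighbors and update the first/last insn
   pointers as needed, then return FIRST.  */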
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }
  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (GET_CODE (label) != CODE_LABEL)
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  if (!insn)
    abort ();   /* No basic blocks at all?  */
  if (PREV_INSN (insn))
    cfg_layout_function_header
      = unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
/* Data structures representing the mapping of INSN_LOCATOR into scope
   blocks, line numbers and files.  In order to be GGC friendly we need to
   use separate varrays.  This also slightly improves the memory locality
   in binary search.  The _locs arrays contain the locators where the given
   property changes.  The block_locators_blocks array contains the scope
   block that is used for all insn locators greater than the corresponding
   block_locators_locs value and smaller than the following one.  Similarly
   for the other properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;
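/* For example (purely illustrative): if block_locators_locs holds {1, 5}
   and block_locators_blocks holds {B1, B2}, then locators 1 through 4 map
   to scope block B1 and locators 5 and above map to B2.  insn_scope,
   locator_line and locator_file below recover the property for a locator
   with a binary search over the matching _locs varray.  */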
/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if ((active_insn_p (insn)
           && GET_CODE (PATTERN (insn)) != ADDR_VEC
           && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
          || !NEXT_INSN (insn)
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
              last_file_name = file_name;
            }
        }

      if (!prologue_locator && file_name)
        prologue_locator = loc;
      if (!NEXT_INSN (insn))
        epilogue_locator = loc;
      if (active_insn_p (insn))
        INSN_LOCATOR (insn) = loc;
      else if (GET_CODE (insn) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_BLOCK_BEG:
              block = NOTE_BLOCK (insn);
              delete_insn (insn);
              break;
            case NOTE_INSN_BLOCK_END:
              block = BLOCK_SUPERCONTEXT (block);
              if (block && TREE_CODE (block) == FUNCTION_DECL)
                block = 0;
              delete_insn (insn);
              break;
            default:
              if (NOTE_LINE_NUMBER (insn) > 0)
                {
                  line_number = NOTE_LINE_NUMBER (insn);
                  file_name = (char *) NOTE_SOURCE_FILE (insn);
                }
              break;
            }
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Return the scope resulting from the combination of S1 and S2.  */
tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}
/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      if (ts1 == NULL || ts2 == NULL)
        abort ();
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}
/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}
/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  if (index != n_basic_blocks)
    abort ();

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();
  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      basic_block old_bb;

      if (bb->succ == NULL)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (GET_CODE (bb_end_insn) == JUMP_INSN)
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || (!bb->rbi->next
                      && e_fall->dest == EXIT_BLOCK_PTR))
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen on targets having jumps with
                 side effects.

                 Temporarily create the duplicated edge representing the
                 branch.  It will get unidentified by
                 force_nonfallthru_and_redirect, which would otherwise get
                 confused by a fallthru edge not pointing to the next basic
                 block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  if (!redirect_jump (BB_END (bb), block_label (bb), 0))
                    abort ();
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fall->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      label_for_bb (e_fall->dest), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if (e_taken->crossing_edge && !e_fall->crossing_edge)
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    label_for_bb (e_fall->dest), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else if (returnjump_p (bb_end_insn))
            continue;
          else
            {
              /* Otherwise we have some switch or computed jump.  In the
                 99% case, there should not have been a fallthru edge.  */
              if (! e_fall)
                continue;

#ifdef CASE_DROPS_THROUGH
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#else
              abort ();
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }
      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          cfg_layout_initialize_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          old_bb = bb;
          bb = nb;

          /* Make sure new bb is tagged for correct section (same as
             fall-thru source).  */
          e_fall->src->partition = bb->pred->src->partition;
          if (flag_reorder_blocks_and_partition)
            {
              if (bb->pred->src->partition == COLD_PARTITION)
                {
                  rtx new_note;
                  rtx note = BB_HEAD (e_fall->src);

                  while (!INSN_P (note)
                         && note != BB_END (e_fall->src))
                    note = NEXT_INSN (note);

                  new_note
                    = emit_note_before (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
                                        note);
                  NOTE_BASIC_BLOCK (new_note) = bb;
                }
              if (GET_CODE (BB_END (bb)) == JUMP_INSN
                  && !any_condjump_p (BB_END (bb))
                  && bb->succ->crossing_edge)
                REG_NOTES (BB_END (bb))
                  = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP, NULL_RTX,
                                       REG_NOTES (BB_END (bb)));
            }
        }
    }
  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
           bb;
           bb = bb->rbi->next, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb)
                   && GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      update_unlikely_executed_notes (bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
        continue;
      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}
/* Update the basic block number information in any
   NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block.  */

static void
update_unlikely_executed_notes (basic_block bb)
{
  rtx cur_insn;

  for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
       cur_insn = NEXT_INSN (cur_insn))
    if (GET_CODE (cur_insn) == NOTE
        && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
      NOTE_BASIC_BLOCK (cur_insn) = bb;
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    if (PREV_INSN (x) != prevx)
      abort ();

  if (prevx != get_last_insn ())
    abort ();

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    if (NEXT_INSN (x) != nextx)
      abort ();

  if (insn_cnt1 != insn_cnt2)
    abort ();
}
/* The block falling through to exit must be the last one in the
   reordered chain.  Ensure that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}
/* Return true in case it is possible to duplicate the basic block BB.  */

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating fallthru block to exit would require adding a jump
     and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
      return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}
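/* Duplicate the insns FROM through TO, inclusive, appending the copies at
   the end of the insn stream.  Dispatch tables and most notes are not
   copied.  Return the first insn of the copy, or NULL if nothing was
   copied.  */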
static rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating the boundaries of the previous basic block.  The
     note will get removed from the insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create the copy at the end of the insn chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from the original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
              /* In case the prologue is empty and the function contains a
                 label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_VTOP:
            case NOTE_INSN_LOOP_CONT:
            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

              /* There is no purpose in duplicating the prologue.  */
            case NOTE_INSN_BLOCK_BEG:
            case NOTE_INSN_BLOCK_END:
              /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
                 reordering is in progress.  */
            case NOTE_INSN_EH_REGION_BEG:
            case NOTE_INSN_EH_REGION_END:
              /* Should never exist at BB duplication time.  */
              abort ();
              break;
            case NOTE_INSN_REPEATED_LINE_NUMBER:
            case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
              emit_note_copy (insn);
              break;

            default:
              if (NOTE_LINE_NUMBER (insn) < 0)
                abort ();
              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          abort ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB and redirect edge E into it.
   If E is not specified, BB is just copied, but updating the frequencies
   etc. is left to the caller.  */

basic_block
cfg_layout_duplicate_bb (basic_block bb, edge e)
{
  rtx insn;
  edge s, n;
  basic_block new_bb;
  gcov_type new_count = e ? e->count : 0;

  if (bb->count < new_count)
    new_count = bb->count;
  if (!bb->pred)
    abort ();
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_duplicate_bb_p (bb))
    abort ();
#endif

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  new_bb->loop_depth = bb->loop_depth;
  new_bb->flags = bb->flags;
  for (s = bb->succ; s; s = s->succ_next)
    {
      /* Since we are creating edges from a new block to successors
         of another block (which therefore are known to be disjoint), there
         is no need to actually check for duplicated edges.  */
      n = unchecked_make_edge (new_bb, s->dest, s->flags);
      n->probability = s->probability;
      if (e && bb->count)
        {
          /* Take care for overflows!  */
          n->count = s->count * (new_count * 10000 / bb->count) / 10000;
          s->count -= n->count;
        }
      else
        n->count = s->count;
      n->aux = s->aux;
    }

  if (e)
    {
      new_bb->count = new_count;
      bb->count -= new_count;

      new_bb->frequency = EDGE_FREQUENCY (e);
      bb->frequency -= EDGE_FREQUENCY (e);

      redirect_edge_and_branch_force (e, new_bb);

      if (bb->count < 0)
        bb->count = 0;
      if (bb->frequency < 0)
        bb->frequency = 0;
    }
  else
    {
      new_bb->count = bb->count;
      new_bb->frequency = bb->frequency;
    }

  new_bb->rbi->original = bb;
  bb->rbi->copy = new_bb;

  return new_bb;
}
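/* Allocate and zero the reorder block info (rbi) structure for BB from
   cfg_layout_pool.  Aborts if BB already has one.  */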
void
cfg_layout_initialize_rbi (basic_block bb)
{
  if (bb->rbi)
    abort ();
  bb->rbi = pool_alloc (cfg_layout_pool);
  memset (bb->rbi, 0, sizeof (struct reorder_block_def));
}
/* Main entry point to this module - initialize the data structures for
   CFG layout changes.  */

void
cfg_layout_initialize (void)
{
  basic_block bb;

  /* Our algorithm depends on the fact that there are no dead jumptables
     around the code.  */
  cfg_layout_pool =
    create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
                       n_basic_blocks + 2);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    cfg_layout_initialize_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT);
}
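/* A minimal sketch (illustrative only) of how a pass typically drives this
   module; "reorder_the_blocks" stands for the caller's own transformation,
   which rearranges the chain linked through bb->rbi->next:

     cfg_layout_initialize ();
     reorder_the_blocks ();    -- set bb->rbi->next to the desired order
     cfg_layout_finalize ();   -- relink insns, add jumps and labels

   Between the two calls each block's surrounding notes live in
   bb->rbi->header and bb->rbi->footer rather than in the insn stream, and
   the CFG should be manipulated through the hooks registered by
   cfg_layout_rtl_register_cfg_hooks above.  */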
/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}
/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_alloc_pool (cfg_layout_pool);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      for (e = bbs[i]->succ; e; e = e->succ_next)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}
/* Duplicates the N basic blocks stored in array BBS.  The newly created
   basic blocks are placed into array NEW_BBS in the same order.  Edges from
   basic blocks in BBS are also duplicated, and those copies that lead into
   BBS are redirected to the appropriate newly created block.  The function
   assigns the new blocks to loops (the copy of basic block bb is assigned
   to the bb->loop_father->copy loop, so this must be set up correctly in
   advance) and updates dominators locally.

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Copies of the N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.  */
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      new_bb = new_bbs[i];
      bb = bbs[i];

      for (e = new_bb->succ; e; e = e->succ_next)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}
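/* An illustrative use of can_copy_bbs_p and copy_bbs (the block and loop
   names here are placeholders, not part of this file):

     basic_block bbs[2] = { bb1, bb2 }, new_bbs[2];

     if (can_copy_bbs_p (bbs, 2))
       copy_bbs (bbs, 2, new_bbs, NULL, 0, NULL, loop);

   Afterwards new_bbs[i] is the copy of bbs[i], also reachable as
   bbs[i]->rbi->copy until a later duplication overwrites it.  */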
#include "gt-cfglayout.h"