gcc/cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer;
45 rtx cfg_layout_function_header;
47 static rtx skip_insns_after_block (basic_block);
48 static void record_effective_endpoints (void);
49 static rtx label_for_bb (basic_block);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx, tree, tree);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree insn_scope (const_rtx);
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
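/* A minimal standalone sketch of the splice done by unlink_insn_chain above,
   using a hypothetical "node" type instead of RTL insns: the inclusive
   sublist [first, last] is detached, its neighbours are joined, and the
   detached piece keeps its internal links.  Head/tail bookkeeping is left to
   the caller, unlike the GCC version, which updates the global first/last
   insn pointers.  */
#include <stddef.h>
#include <stdio.h>

struct node { struct node *prev, *next; int id; };

static struct node *
unlink_sublist (struct node *first, struct node *last)
{
  struct node *before = first->prev;
  struct node *after = last->next;

  first->prev = NULL;      /* The detached piece has no outside links.  */
  last->next = NULL;
  if (before)
    before->next = after;  /* Join the neighbours around the hole.  */
  if (after)
    after->prev = before;
  return first;
}

int
main (void)
{
  struct node n[5];
  for (int i = 0; i < 5; i++)
    {
      n[i].id = i;
      n[i].prev = i > 0 ? &n[i - 1] : NULL;
      n[i].next = i < 4 ? &n[i + 1] : NULL;
    }

  unlink_sublist (&n[1], &n[3]);   /* Splice nodes 1..3 out of the chain.  */
  for (struct node *p = &n[0]; p; p = p->next)
    printf ("%d ", p->id);         /* Prints "0 4 ".  */
  printf ("\n");
  return 0;
}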
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
81 static rtx
82 skip_insns_after_block (basic_block bb)
84 rtx insn, last_insn, next_head, prev;
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
92 if (insn == next_head)
93 break;
95 switch (GET_CODE (insn))
97 case BARRIER:
98 last_insn = insn;
99 continue;
101 case NOTE:
102 switch (NOTE_KIND (insn))
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
111 break;
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_P (NEXT_INSN (insn))
116 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
117 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
119 insn = NEXT_INSN (insn);
120 last_insn = insn;
121 continue;
123 break;
125 default:
126 break;
129 break;
132 /* It is possible to hit a contradictory sequence. For instance:
134 jump_insn
135 NOTE_INSN_BLOCK_BEG
136 barrier
138 where the barrier belongs to the jump_insn, but the note does not. This can be
139 created by removing the basic block that originally followed
140 NOTE_INSN_BLOCK_BEG. In such a case, reorder the notes. */
142 for (insn = last_insn; insn != BB_END (bb); insn = prev)
144 prev = PREV_INSN (insn);
145 if (NOTE_P (insn))
146 switch (NOTE_KIND (insn))
148 case NOTE_INSN_BLOCK_END:
149 gcc_unreachable ();
150 break;
151 case NOTE_INSN_DELETED:
152 case NOTE_INSN_DELETED_LABEL:
153 continue;
154 default:
155 reorder_insns (insn, insn, last_insn);
159 return last_insn;
162 /* Locate or create a label for a given basic block. */
164 static rtx
165 label_for_bb (basic_block bb)
167 rtx label = BB_HEAD (bb);
169 if (!LABEL_P (label))
171 if (dump_file)
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
174 label = block_label (bb);
177 return label;
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
183 static void
184 record_effective_endpoints (void)
186 rtx next_insn;
187 basic_block bb;
188 rtx insn;
190 for (insn = get_insns ();
191 insn
192 && NOTE_P (insn)
193 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
194 insn = NEXT_INSN (insn))
195 continue;
196 /* No basic blocks at all? */
197 gcc_assert (insn);
199 if (PREV_INSN (insn))
200 cfg_layout_function_header =
201 unlink_insn_chain (get_insns (), PREV_INSN (insn));
202 else
203 cfg_layout_function_header = NULL_RTX;
205 next_insn = get_insns ();
206 FOR_EACH_BB (bb)
208 rtx end;
210 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
211 bb->il.rtl->header = unlink_insn_chain (next_insn,
212 PREV_INSN (BB_HEAD (bb)));
213 end = skip_insns_after_block (bb);
214 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
215 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
216 next_insn = NEXT_INSN (BB_END (bb));
219 cfg_layout_function_footer = next_insn;
220 if (cfg_layout_function_footer)
221 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
224 /* Data structures representing the mapping of INSN_LOCATOR into scope blocks, line
225 numbers and files. In order to be GGC friendly we need to use separate
226 varrays. This also slightly improves the memory locality in the binary search.
227 The _locs arrays contain the locators where the given property changes. The
228 block_locators_blocks array contains the scope block that is used for all insn
229 locators greater than the corresponding block_locators_locs value and smaller
230 than the following one. Similarly for the other properties. */
231 static VEC(int,heap) *block_locators_locs;
232 static GTY(()) VEC(tree,gc) *block_locators_blocks;
233 static VEC(int,heap) *locations_locators_locs;
234 DEF_VEC_O(location_t);
235 DEF_VEC_ALLOC_O(location_t,heap);
236 static VEC(location_t,heap) *locations_locators_vals;
237 int prologue_locator;
238 int epilogue_locator;
240 /* Hold the current and the last location information, so that the
241 datastructures are built lazily only when some instructions in a given
242 place are needed. */
243 location_t curr_location, last_location;
244 static tree curr_block, last_block;
245 static int curr_rtl_loc = -1;
247 /* Allocate insn locator datastructure. */
248 void
249 insn_locators_alloc (void)
251 prologue_locator = epilogue_locator = 0;
253 block_locators_locs = VEC_alloc (int, heap, 32);
254 block_locators_blocks = VEC_alloc (tree, gc, 32);
255 locations_locators_locs = VEC_alloc (int, heap, 32);
256 locations_locators_vals = VEC_alloc (location_t, heap, 32);
258 last_location = -1;
259 curr_location = -1;
260 curr_block = NULL;
261 last_block = NULL;
262 curr_rtl_loc = 0;
265 /* At the end of emit stage, clear current location. */
266 void
267 insn_locators_finalize (void)
269 if (curr_rtl_loc >= 0)
270 epilogue_locator = curr_insn_locator ();
271 curr_rtl_loc = -1;
274 /* Set current location. */
275 void
276 set_curr_insn_source_location (location_t location)
278 /* IV opts calls into RTL expansion to compute costs of operations. At this
279 time locators are not initialized. */
280 if (curr_rtl_loc == -1)
281 return;
282 if (location == last_location)
283 return;
284 curr_location = location;
287 /* Set current scope block. */
288 void
289 set_curr_insn_block (tree b)
291 /* IV opts calls into RTL expansion to compute costs of operations. At this
292 time locators are not initialized. */
293 if (curr_rtl_loc == -1)
294 return;
295 if (b)
296 curr_block = b;
299 /* Return current insn locator. */
300 int
301 curr_insn_locator (void)
303 if (curr_rtl_loc == -1)
304 return 0;
305 if (last_block != curr_block)
307 curr_rtl_loc++;
308 VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
309 VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
310 last_block = curr_block;
312 if (last_location != curr_location)
314 curr_rtl_loc++;
315 VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
316 VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
317 last_location = curr_location;
319 return curr_rtl_loc;
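/* A standalone sketch of the lazy locator scheme used by curr_insn_locator
   above, with hypothetical names and plain arrays instead of the VEC
   datastructures: a new (locator, value) pair is recorded only when the
   tracked value actually changes, so a run of insns sharing one location
   costs a single entry.  Locator 0 is reserved for "unknown", as in the
   code above.  */
#include <stdio.h>

#define MAX_ENTRIES 64

static int change_locs[MAX_ENTRIES]; /* Locators at which the value changed.  */
static int change_vals[MAX_ENTRIES]; /* Value in effect from that locator on.  */
static int n_entries;
static int curr_loc;                 /* Analogue of curr_rtl_loc.  */
static int curr_val = -1, last_val = -1;

static int
next_locator (void)
{
  /* Record a change point only when the value differs from the last
     recorded one, then hand out the (possibly unchanged) locator.  */
  if (curr_val != last_val && n_entries < MAX_ENTRIES)
    {
      curr_loc++;
      change_locs[n_entries] = curr_loc;
      change_vals[n_entries] = curr_val;
      n_entries++;
      last_val = curr_val;
    }
  return curr_loc;
}

int
main (void)
{
  int vals[] = { 10, 10, 10, 42, 42, 7 };
  for (int i = 0; i < 6; i++)
    {
      curr_val = vals[i];            /* Analogue of set_curr_insn_source_location.  */
      printf ("insn %d -> locator %d\n", i, next_locator ());
    }
  printf ("%d change points for 6 insns\n", n_entries);   /* 3, not 6.  */
  return 0;
}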
322 static unsigned int
323 into_cfg_layout_mode (void)
325 cfg_layout_initialize (0);
326 return 0;
329 static unsigned int
330 outof_cfg_layout_mode (void)
332 basic_block bb;
334 FOR_EACH_BB (bb)
335 if (bb->next_bb != EXIT_BLOCK_PTR)
336 bb->aux = bb->next_bb;
338 cfg_layout_finalize ();
340 return 0;
343 struct rtl_opt_pass pass_into_cfg_layout_mode =
346 RTL_PASS,
347 "into_cfglayout", /* name */
348 NULL, /* gate */
349 into_cfg_layout_mode, /* execute */
350 NULL, /* sub */
351 NULL, /* next */
352 0, /* static_pass_number */
353 0, /* tv_id */
354 0, /* properties_required */
355 0, /* properties_provided */
356 0, /* properties_destroyed */
357 0, /* todo_flags_start */
358 TODO_dump_func, /* todo_flags_finish */
362 struct rtl_opt_pass pass_outof_cfg_layout_mode =
365 RTL_PASS,
366 "outof_cfglayout", /* name */
367 NULL, /* gate */
368 outof_cfg_layout_mode, /* execute */
369 NULL, /* sub */
370 NULL, /* next */
371 0, /* static_pass_number */
372 0, /* tv_id */
373 0, /* properties_required */
374 0, /* properties_provided */
375 0, /* properties_destroyed */
376 0, /* todo_flags_start */
377 TODO_dump_func, /* todo_flags_finish */
381 /* Return the scope resulting from the combination of S1 and S2. */
382 static tree
383 choose_inner_scope (tree s1, tree s2)
385 if (!s1)
386 return s2;
387 if (!s2)
388 return s1;
389 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
390 return s1;
391 return s2;
394 /* Emit lexical block notes needed to change scope from S1 to S2. */
396 static void
397 change_scope (rtx orig_insn, tree s1, tree s2)
399 rtx insn = orig_insn;
400 tree com = NULL_TREE;
401 tree ts1 = s1, ts2 = s2;
402 tree s;
404 while (ts1 != ts2)
406 gcc_assert (ts1 && ts2);
407 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
408 ts1 = BLOCK_SUPERCONTEXT (ts1);
409 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
410 ts2 = BLOCK_SUPERCONTEXT (ts2);
411 else
413 ts1 = BLOCK_SUPERCONTEXT (ts1);
414 ts2 = BLOCK_SUPERCONTEXT (ts2);
417 com = ts1;
419 /* Close scopes. */
420 s = s1;
421 while (s != com)
423 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
424 NOTE_BLOCK (note) = s;
425 s = BLOCK_SUPERCONTEXT (s);
428 /* Open scopes. */
429 s = s2;
430 while (s != com)
432 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
433 NOTE_BLOCK (insn) = s;
434 s = BLOCK_SUPERCONTEXT (s);
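/* A standalone sketch of the common-ancestor walk performed by change_scope
   above, using a hypothetical "scope" type with explicit parent and depth
   fields where the real code walks BLOCK_SUPERCONTEXT and compares
   BLOCK_NUMBERs: climb the parent chain of whichever scope is deeper until
   both sides meet; scopes below the meeting point on one side get closed and
   scopes below it on the other side get opened.  */
#include <stddef.h>
#include <stdio.h>

struct scope { const char *name; struct scope *parent; int depth; };

static struct scope *
common_ancestor (struct scope *s1, struct scope *s2)
{
  while (s1 != s2)
    {
      if (s1->depth > s2->depth)
        s1 = s1->parent;
      else if (s2->depth > s1->depth)
        s2 = s2->parent;
      else
        {
          s1 = s1->parent;
          s2 = s2->parent;
        }
    }
  return s1;
}

int
main (void)
{
  struct scope root = { "root", NULL, 0 };
  struct scope a = { "a", &root, 1 };
  struct scope b = { "b", &a, 2 };
  struct scope c = { "c", &a, 2 };
  struct scope d = { "d", &c, 3 };

  struct scope *com = common_ancestor (&b, &d);
  printf ("common ancestor of b and d: %s\n", com->name);   /* Prints "a".  */

  /* Close everything from b up to (not including) the ancestor, then open
     everything from d up to it, the analogue of the BLOCK_END and BLOCK_BEG
     notes emitted above.  */
  for (struct scope *s = &b; s != com; s = s->parent)
    printf ("close %s\n", s->name);
  for (struct scope *s = &d; s != com; s = s->parent)
    printf ("open %s\n", s->name);
  return 0;
}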
438 /* Return the lexical scope block that INSN belongs to. */
439 static tree
440 insn_scope (const_rtx insn)
442 int max = VEC_length (int, block_locators_locs);
443 int min = 0;
444 int loc = INSN_LOCATOR (insn);
446 /* When block_locators_locs was initialized, the pro- and epilogue
447 insns didn't exist yet and can therefore not be found this way.
448 But we know that they belong to the outermost block of the
449 current function.
450 Without this test, the prologue would be put inside the block of
451 the first valid instruction in the function and when that first
452 insn is part of an inlined function then the low_pc of that
453 inlined function is messed up. Likewise for the epilogue and
454 the last valid instruction. */
455 if (loc == prologue_locator || loc == epilogue_locator)
456 return DECL_INITIAL (cfun->decl);
458 if (!max || !loc)
459 return NULL;
460 while (1)
462 int pos = (min + max) / 2;
463 int tmp = VEC_index (int, block_locators_locs, pos);
465 if (tmp <= loc && min != pos)
466 min = pos;
467 else if (tmp > loc && max != pos)
468 max = pos;
469 else
471 min = pos;
472 break;
475 return VEC_index (tree, block_locators_blocks, min);
478 /* Return the source location of the statement specified by locator LOC. */
479 static location_t
480 locator_location (int loc)
482 int max = VEC_length (int, locations_locators_locs);
483 int min = 0;
485 while (1)
487 int pos = (min + max) / 2;
488 int tmp = VEC_index (int, locations_locators_locs, pos);
490 if (tmp <= loc && min != pos)
491 min = pos;
492 else if (tmp > loc && max != pos)
493 max = pos;
494 else
496 min = pos;
497 break;
500 return *VEC_index (location_t, locations_locators_vals, min);
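/* A standalone sketch of the binary search shared by insn_scope and
   locator_location above, with hypothetical names and plain arrays: given a
   sorted list of locators at which a property changed, find the last change
   point at or before LOC and return the value recorded there.  Like the GCC
   code, it assumes LOC is no smaller than the first recorded locator.  */
#include <stdio.h>

static const int change_locs[] = { 1, 4, 9 };   /* Sorted change points.  */
static const int change_vals[] = { 10, 42, 7 }; /* Value in effect from each one.  */
static const int n_entries = 3;

static int
value_for_locator (int loc)
{
  int lo = 0, hi = n_entries - 1;

  /* Invariant: change_locs[lo] <= loc and the answer lies in [lo, hi].  */
  while (lo < hi)
    {
      int mid = (lo + hi + 1) / 2;   /* Round up so the loop terminates.  */
      if (change_locs[mid] <= loc)
        lo = mid;
      else
        hi = mid - 1;
    }
  return change_vals[lo];
}

int
main (void)
{
  for (int loc = 1; loc <= 10; loc++)
    printf ("locator %d -> value %d\n", loc, value_for_locator (loc));
  /* Locators 1-3 map to 10, 4-8 to 42, and 9-10 to 7.  */
  return 0;
}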
503 /* Return the source line of the statement specified by locator LOC. */
504 int
505 locator_line (int loc)
507 expanded_location xloc;
508 if (!loc)
509 return 0;
510 else
511 xloc = expand_location (locator_location (loc));
512 return xloc.line;
515 /* Return line number of the statement that produced this insn. */
516 int
517 insn_line (const_rtx insn)
519 return locator_line (INSN_LOCATOR (insn));
522 /* Return source file of the statement specified by LOC. */
523 const char *
524 locator_file (int loc)
526 expanded_location xloc;
527 if (!loc)
528 return 0;
529 else
530 xloc = expand_location (locator_location (loc));
531 return xloc.file;
534 /* Return source file of the statement that produced this insn. */
535 const char *
536 insn_file (const_rtx insn)
538 return locator_file (INSN_LOCATOR (insn));
541 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
542 on the scope tree and the newly reordered instructions. */
544 void
545 reemit_insn_block_notes (void)
547 tree cur_block = DECL_INITIAL (cfun->decl);
548 rtx insn, note;
550 insn = get_insns ();
551 if (!active_insn_p (insn))
552 insn = next_active_insn (insn);
553 for (; insn; insn = next_active_insn (insn))
555 tree this_block;
557 /* Avoid putting scope notes between jump table and its label. */
558 if (JUMP_P (insn)
559 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
560 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
561 continue;
563 this_block = insn_scope (insn);
564 /* For sequences compute scope resulting from merging all scopes
565 of instructions nested inside. */
566 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
568 int i;
569 rtx body = PATTERN (insn);
571 this_block = NULL;
572 for (i = 0; i < XVECLEN (body, 0); i++)
573 this_block = choose_inner_scope (this_block,
574 insn_scope (XVECEXP (body, 0, i)));
576 if (! this_block)
577 continue;
579 if (this_block != cur_block)
581 change_scope (insn, cur_block, this_block);
582 cur_block = this_block;
586 /* change_scope emits before the insn, not after. */
587 note = emit_note (NOTE_INSN_DELETED);
588 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
589 delete_insn (note);
591 reorder_blocks ();
595 /* Link the basic blocks in the correct order, compacting the basic
596 block queue while at it. This also clears the visited flag on
597 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
598 also clears the basic block header and footer fields.
600 This function is usually called after a pass (e.g. tracer) finishes
601 some transformations while in cfglayout mode. The required sequence
602 of the basic blocks is in a linked list along the bb->aux field.
603 This function re-links the basic block prev_bb and next_bb pointers
604 accordingly, and it compacts and renumbers the blocks. */
606 void
607 relink_block_chain (bool stay_in_cfglayout_mode)
609 basic_block bb, prev_bb;
610 int index;
612 /* Maybe dump the re-ordered sequence. */
613 if (dump_file)
615 fprintf (dump_file, "Reordered sequence:\n");
616 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
618 bb = (basic_block) bb->aux, index++)
620 fprintf (dump_file, " %i ", index);
621 if (get_bb_original (bb))
622 fprintf (dump_file, "duplicate of %i ",
623 get_bb_original (bb)->index);
624 else if (forwarder_block_p (bb)
625 && !LABEL_P (BB_HEAD (bb)))
626 fprintf (dump_file, "compensation ");
627 else
628 fprintf (dump_file, "bb %i ", bb->index);
629 fprintf (dump_file, " [%i]\n", bb->frequency);
633 /* Now reorder the blocks. */
634 prev_bb = ENTRY_BLOCK_PTR;
635 bb = ENTRY_BLOCK_PTR->next_bb;
636 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
638 bb->prev_bb = prev_bb;
639 prev_bb->next_bb = bb;
641 prev_bb->next_bb = EXIT_BLOCK_PTR;
642 EXIT_BLOCK_PTR->prev_bb = prev_bb;
644 /* Then, clean up the aux and visited fields. */
645 FOR_ALL_BB (bb)
647 bb->aux = NULL;
648 bb->il.rtl->visited = 0;
649 if (!stay_in_cfglayout_mode)
650 bb->il.rtl->header = bb->il.rtl->footer = NULL;
653 /* Maybe reset the original copy tables; they are not valid anymore
654 when we renumber the basic blocks in compact_blocks. If we are
655 going out of cfglayout mode, don't re-allocate the tables. */
656 free_original_copy_tables ();
657 if (stay_in_cfglayout_mode)
658 initialize_original_copy_tables ();
660 /* Finally, put basic_block_info in the new order. */
661 compact_blocks ();
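/* A standalone sketch of the re-threading done by relink_block_chain above,
   using a hypothetical "block" type: the desired order arrives as a singly
   linked list through the aux field, and the prev/next pointers are rebuilt
   to match, with sentinel entry and exit nodes at either end.  */
#include <stddef.h>
#include <stdio.h>

struct block { int index; struct block *prev, *next, *aux; };

static void
relink (struct block *entry_blk, struct block *exit_blk)
{
  struct block *prev = entry_blk;
  struct block *bb;

  /* Walk the aux chain and rebuild the doubly linked order.  */
  for (bb = entry_blk->next; bb; prev = bb, bb = bb->aux)
    {
      bb->prev = prev;
      prev->next = bb;
    }
  prev->next = exit_blk;
  exit_blk->prev = prev;
}

int
main (void)
{
  struct block entry_blk = { -1, NULL, NULL, NULL };
  struct block exit_blk = { -2, NULL, NULL, NULL };
  struct block b[3] = { { 0, NULL, NULL, NULL },
                        { 1, NULL, NULL, NULL },
                        { 2, NULL, NULL, NULL } };

  /* Desired order through aux: entry -> 2 -> 0 -> 1 -> exit.  */
  entry_blk.next = &b[2];
  b[2].aux = &b[0];
  b[0].aux = &b[1];
  b[1].aux = NULL;

  relink (&entry_blk, &exit_blk);
  for (struct block *p = entry_blk.next; p != &exit_blk; p = p->next)
    printf ("%d ", p->index);        /* Prints "2 0 1 ".  */
  printf ("\n");
  return 0;
}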
665 /* Given a reorder chain, rearrange the code to match. */
667 static void
668 fixup_reorder_chain (void)
670 basic_block bb;
671 rtx insn = NULL;
673 if (cfg_layout_function_header)
675 set_first_insn (cfg_layout_function_header);
676 insn = cfg_layout_function_header;
677 while (NEXT_INSN (insn))
678 insn = NEXT_INSN (insn);
681 /* First do the bulk reordering -- rechain the blocks without regard to
682 the needed changes to jumps and labels. */
684 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
686 if (bb->il.rtl->header)
688 if (insn)
689 NEXT_INSN (insn) = bb->il.rtl->header;
690 else
691 set_first_insn (bb->il.rtl->header);
692 PREV_INSN (bb->il.rtl->header) = insn;
693 insn = bb->il.rtl->header;
694 while (NEXT_INSN (insn))
695 insn = NEXT_INSN (insn);
697 if (insn)
698 NEXT_INSN (insn) = BB_HEAD (bb);
699 else
700 set_first_insn (BB_HEAD (bb));
701 PREV_INSN (BB_HEAD (bb)) = insn;
702 insn = BB_END (bb);
703 if (bb->il.rtl->footer)
705 NEXT_INSN (insn) = bb->il.rtl->footer;
706 PREV_INSN (bb->il.rtl->footer) = insn;
707 while (NEXT_INSN (insn))
708 insn = NEXT_INSN (insn);
712 NEXT_INSN (insn) = cfg_layout_function_footer;
713 if (cfg_layout_function_footer)
714 PREV_INSN (cfg_layout_function_footer) = insn;
716 while (NEXT_INSN (insn))
717 insn = NEXT_INSN (insn);
719 set_last_insn (insn);
720 #ifdef ENABLE_CHECKING
721 verify_insn_chain ();
722 #endif
724 /* Now add jumps and labels as needed to match the blocks' new
725 outgoing edges. */
727 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
729 edge e_fall, e_taken, e;
730 rtx bb_end_insn;
731 basic_block nb;
732 edge_iterator ei;
734 if (EDGE_COUNT (bb->succs) == 0)
735 continue;
737 /* Find the old fallthru edge, and another non-EH edge for
738 a taken jump. */
739 e_taken = e_fall = NULL;
741 FOR_EACH_EDGE (e, ei, bb->succs)
742 if (e->flags & EDGE_FALLTHRU)
743 e_fall = e;
744 else if (! (e->flags & EDGE_EH))
745 e_taken = e;
747 bb_end_insn = BB_END (bb);
748 if (JUMP_P (bb_end_insn))
750 if (any_condjump_p (bb_end_insn))
752 /* If the old fallthru is still next, nothing to do. */
753 if (bb->aux == e_fall->dest
754 || e_fall->dest == EXIT_BLOCK_PTR)
755 continue;
757 /* The degenerate case of a conditional jump jumping to the next
758 instruction can happen for jumps with side effects. We need
759 to construct a forwarder block, and this will be done just
760 fine by force_nonfallthru below. */
761 if (!e_taken)
764 /* There is another special case: if *neither* block is next,
765 such as happens at the very end of a function, then we'll
766 need to add a new unconditional jump. Choose the taken
767 edge based on known or assumed probability. */
768 else if (bb->aux != e_taken->dest)
770 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
772 if (note
773 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
774 && invert_jump (bb_end_insn,
775 (e_fall->dest == EXIT_BLOCK_PTR
776 ? NULL_RTX
777 : label_for_bb (e_fall->dest)), 0))
779 e_fall->flags &= ~EDGE_FALLTHRU;
780 #ifdef ENABLE_CHECKING
781 gcc_assert (could_fall_through
782 (e_taken->src, e_taken->dest));
783 #endif
784 e_taken->flags |= EDGE_FALLTHRU;
785 update_br_prob_note (bb);
786 e = e_fall, e_fall = e_taken, e_taken = e;
790 /* If the "jumping" edge is a crossing edge, and the fall
791 through edge is non-crossing, leave things as they are. */
792 else if ((e_taken->flags & EDGE_CROSSING)
793 && !(e_fall->flags & EDGE_CROSSING))
794 continue;
796 /* Otherwise we can try to invert the jump. This will
797 basically never fail, however, keep up the pretense. */
798 else if (invert_jump (bb_end_insn,
799 (e_fall->dest == EXIT_BLOCK_PTR
800 ? NULL_RTX
801 : label_for_bb (e_fall->dest)), 0))
803 e_fall->flags &= ~EDGE_FALLTHRU;
804 #ifdef ENABLE_CHECKING
805 gcc_assert (could_fall_through
806 (e_taken->src, e_taken->dest));
807 #endif
808 e_taken->flags |= EDGE_FALLTHRU;
809 update_br_prob_note (bb);
810 continue;
813 else
815 /* Otherwise we have some return, switch or computed
816 jump. In the 99% case, there should not have been a
817 fallthru edge. */
818 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
819 continue;
822 else
824 /* No fallthru implies a noreturn function with EH edges, or
825 something similarly bizarre. In any case, we don't need to
826 do anything. */
827 if (! e_fall)
828 continue;
830 /* If the fallthru block is still next, nothing to do. */
831 if (bb->aux == e_fall->dest)
832 continue;
834 /* A fallthru to exit block. */
835 if (e_fall->dest == EXIT_BLOCK_PTR)
836 continue;
839 /* We got here if we need to add a new jump insn. */
840 nb = force_nonfallthru (e_fall);
841 if (nb)
843 nb->il.rtl->visited = 1;
844 nb->aux = bb->aux;
845 bb->aux = nb;
846 /* Don't process this new block. */
847 bb = nb;
849 /* Make sure the new bb is tagged for the correct section (same as the
850 fall-thru source, since you cannot fall through across
851 section boundaries). */
852 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
853 if (flag_reorder_blocks_and_partition
854 && targetm.have_named_sections
855 && JUMP_P (BB_END (bb))
856 && !any_condjump_p (BB_END (bb))
857 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
858 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
859 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
863 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
865 /* Annoying special case - jump around dead jumptables left in the code. */
866 FOR_EACH_BB (bb)
868 edge e;
869 edge_iterator ei;
871 FOR_EACH_EDGE (e, ei, bb->succs)
872 if (e->flags & EDGE_FALLTHRU)
873 break;
875 if (e && !can_fallthru (e->src, e->dest))
876 force_nonfallthru (e);
880 /* Perform sanity checks on the insn chain.
881 1. Check that next/prev pointers are consistent in both the forward and
882 reverse direction.
883 2. Count insns in chain, going both directions, and check if equal.
884 3. Check that get_last_insn () returns the actual end of chain. */
886 void
887 verify_insn_chain (void)
889 rtx x, prevx, nextx;
890 int insn_cnt1, insn_cnt2;
892 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
893 x != 0;
894 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
895 gcc_assert (PREV_INSN (x) == prevx);
897 gcc_assert (prevx == get_last_insn ());
899 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
900 x != 0;
901 nextx = x, insn_cnt2++, x = PREV_INSN (x))
902 gcc_assert (NEXT_INSN (x) == nextx);
904 gcc_assert (insn_cnt1 == insn_cnt2);
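/* A standalone sketch of the consistency checks in verify_insn_chain above,
   with a hypothetical node type: walk the chain forwards checking every
   prev pointer, walk it backwards checking every next pointer, and compare
   the two counts.  */
#include <assert.h>
#include <stddef.h>

struct node { struct node *prev, *next; };

static void
verify_chain (struct node *first, struct node *last)
{
  struct node *x, *prevx = NULL, *nextx = NULL;
  int cnt1 = 0, cnt2 = 0;

  for (x = first; x; prevx = x, cnt1++, x = x->next)
    assert (x->prev == prevx);       /* Forward walk checks the prev links.  */
  assert (prevx == last);            /* The walk must end at LAST.  */

  for (x = last; x; nextx = x, cnt2++, x = x->prev)
    assert (x->next == nextx);       /* Backward walk checks the next links.  */
  assert (cnt1 == cnt2);             /* Both walks must see the same length.  */
}

int
main (void)
{
  struct node a = { NULL, NULL }, b = { NULL, NULL }, c = { NULL, NULL };
  a.next = &b; b.prev = &a;
  b.next = &c; c.prev = &b;
  verify_chain (&a, &c);             /* Passes silently on a well-formed chain.  */
  return 0;
}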
907 /* If we have assembler epilogues, the block falling through to exit must
908 be the last one in the reordered chain when we reach final. Ensure
909 that this condition is met. */
910 static void
911 fixup_fallthru_exit_predecessor (void)
913 edge e;
914 edge_iterator ei;
915 basic_block bb = NULL;
917 /* This transformation is not valid before reload, because we might
918 separate a call from the instruction that copies the return
919 value. */
920 gcc_assert (reload_completed);
922 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
923 if (e->flags & EDGE_FALLTHRU)
924 bb = e->src;
926 if (bb && bb->aux)
928 basic_block c = ENTRY_BLOCK_PTR->next_bb;
930 /* If the very first block is the one with the fall-through exit
931 edge, we have to split that block. */
932 if (c == bb)
934 bb = split_block (bb, NULL)->dest;
935 bb->aux = c->aux;
936 c->aux = bb;
937 bb->il.rtl->footer = c->il.rtl->footer;
938 c->il.rtl->footer = NULL;
941 while (c->aux != bb)
942 c = (basic_block) c->aux;
944 c->aux = bb->aux;
945 while (c->aux)
946 c = (basic_block) c->aux;
948 c->aux = bb;
949 bb->aux = NULL;
953 /* In case the exit block has more than one fallthru predecessor, force
954 there to be only one. */
956 static void
957 force_one_exit_fallthru (void)
959 edge e, predecessor = NULL;
960 bool more = false;
961 edge_iterator ei;
962 basic_block forwarder, bb;
964 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
965 if (e->flags & EDGE_FALLTHRU)
967 if (predecessor == NULL)
968 predecessor = e;
969 else
971 more = true;
972 break;
976 if (!more)
977 return;
979 /* Exit has several fallthru predecessors. Create a forwarder block for
980 them. */
981 forwarder = split_edge (predecessor);
982 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
984 if (e->src == forwarder
985 || !(e->flags & EDGE_FALLTHRU))
986 ei_next (&ei);
987 else
988 redirect_edge_and_branch_force (e, forwarder);
991 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
992 exit block. */
993 FOR_EACH_BB (bb)
995 if (bb->aux == NULL && bb != forwarder)
997 bb->aux = forwarder;
998 break;
1003 /* Return true in case it is possible to duplicate the basic block BB. */
1005 /* We do not want to declare the function in a header file, since it should
1006 only be used through the cfghooks interface, and we do not want to move
1007 it to cfgrtl.c since it would require also moving quite a lot of related
1008 code. */
1009 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1011 bool
1012 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1014 /* Do not attempt to duplicate tablejumps, as we need to unshare
1015 the dispatch table. This is difficult to do, as the instructions
1016 computing the jump destination may be hoisted outside the basic block. */
1017 if (tablejump_p (BB_END (bb), NULL, NULL))
1018 return false;
1020 /* Do not duplicate blocks containing insns that can't be copied. */
1021 if (targetm.cannot_copy_insn_p)
1023 rtx insn = BB_HEAD (bb);
1024 while (1)
1026 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1027 return false;
1028 if (insn == BB_END (bb))
1029 break;
1030 insn = NEXT_INSN (insn);
1034 return true;
1037 rtx
1038 duplicate_insn_chain (rtx from, rtx to)
1040 rtx insn, last;
1042 /* Avoid updating the boundaries of the previous basic block. The
1043 note will get removed from the insn stream during fixup. */
1044 last = emit_note (NOTE_INSN_DELETED);
1046 /* Create copy at the end of INSN chain. The chain will
1047 be reordered later. */
1048 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1050 switch (GET_CODE (insn))
1052 case INSN:
1053 case CALL_INSN:
1054 case JUMP_INSN:
1055 /* Avoid copying dispatch tables. We never duplicate
1056 tablejumps, so this can only happen in case the table got
1057 moved far from the original jump. */
1058 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1059 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1060 break;
1061 emit_copy_of_insn_after (insn, get_last_insn ());
1062 break;
1064 case CODE_LABEL:
1065 break;
1067 case BARRIER:
1068 emit_barrier ();
1069 break;
1071 case NOTE:
1072 switch (NOTE_KIND (insn))
1074 /* In case the prologue is empty and the function contains a label
1075 in its first BB, we may want to copy the block. */
1076 case NOTE_INSN_PROLOGUE_END:
1078 case NOTE_INSN_DELETED:
1079 case NOTE_INSN_DELETED_LABEL:
1080 /* No problem to strip these. */
1081 case NOTE_INSN_EPILOGUE_BEG:
1082 /* Debug code expects these notes to exist just once.
1083 Keep them in the master copy.
1084 ??? It probably makes more sense to duplicate them for each
1085 epilogue copy. */
1086 case NOTE_INSN_FUNCTION_BEG:
1087 /* There is always just a single entry to the function. */
1088 case NOTE_INSN_BASIC_BLOCK:
1089 break;
1091 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1092 emit_note_copy (insn);
1093 break;
1095 default:
1096 /* All other notes should have already been eliminated. */
1098 gcc_unreachable ();
1100 break;
1101 default:
1102 gcc_unreachable ();
1105 insn = NEXT_INSN (last);
1106 delete_insn (last);
1107 return insn;
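/* A standalone sketch of the selective copy performed by duplicate_insn_chain
   above, with a hypothetical node type and kinds: walk the inclusive range
   [from, to], append copies of ordinary nodes to a fresh chain, and skip the
   kinds that must not be duplicated (labels and dispatch tables in the real
   code).  */
#include <stdio.h>
#include <stdlib.h>

enum kind { PLAIN, LABEL };

struct node { enum kind kind; int id; struct node *next; };

/* Append a copy of SRC to the chain ending at *TAIL.  */
static void
emit_copy (const struct node *src, struct node **tail)
{
  struct node *copy = malloc (sizeof *copy);
  *copy = *src;
  copy->next = NULL;
  (*tail)->next = copy;
  *tail = copy;
}

/* Copy [from, to] inclusive, skipping LABEL nodes; return the new chain.  */
static struct node *
duplicate_chain (struct node *from, struct node *to)
{
  struct node head = { PLAIN, -1, NULL };
  struct node *tail = &head;

  for (struct node *n = from; n != to->next; n = n->next)
    if (n->kind != LABEL)
      emit_copy (n, &tail);
  return head.next;
}

int
main (void)
{
  struct node n3 = { PLAIN, 3, NULL };
  struct node n2 = { LABEL, 2, &n3 };
  struct node n1 = { PLAIN, 1, &n2 };

  for (struct node *p = duplicate_chain (&n1, &n3); p; p = p->next)
    printf ("%d ", p->id);           /* Prints "1 3 "; the label is skipped.  */
  printf ("\n");
  return 0;
}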
1109 /* Create a duplicate of the basic block BB. */
1111 /* We do not want to declare the function in a header file, since it should
1112 only be used through the cfghooks interface, and we do not want to move
1113 it to cfgrtl.c since it would require also moving quite a lot of related
1114 code. */
1115 extern basic_block cfg_layout_duplicate_bb (basic_block);
1117 basic_block
1118 cfg_layout_duplicate_bb (basic_block bb)
1120 rtx insn;
1121 basic_block new_bb;
1123 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1124 new_bb = create_basic_block (insn,
1125 insn ? get_last_insn () : NULL,
1126 EXIT_BLOCK_PTR->prev_bb);
1128 BB_COPY_PARTITION (new_bb, bb);
1129 if (bb->il.rtl->header)
1131 insn = bb->il.rtl->header;
1132 while (NEXT_INSN (insn))
1133 insn = NEXT_INSN (insn);
1134 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1135 if (insn)
1136 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1139 if (bb->il.rtl->footer)
1141 insn = bb->il.rtl->footer;
1142 while (NEXT_INSN (insn))
1143 insn = NEXT_INSN (insn);
1144 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1145 if (insn)
1146 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1149 return new_bb;
1153 /* Main entry point to this module - initialize the datastructures for
1154 CFG layout changes. It keeps LOOPS up-to-date if not null.
1156 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1158 void
1159 cfg_layout_initialize (unsigned int flags)
1161 rtx x;
1162 basic_block bb;
1164 initialize_original_copy_tables ();
1166 cfg_layout_rtl_register_cfg_hooks ();
1168 record_effective_endpoints ();
1170 /* Make sure that the targets of non-local gotos are marked. */
1171 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1173 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1174 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1177 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1180 /* Splits superblocks. */
1181 void
1182 break_superblocks (void)
1184 sbitmap superblocks;
1185 bool need = false;
1186 basic_block bb;
1188 superblocks = sbitmap_alloc (last_basic_block);
1189 sbitmap_zero (superblocks);
1191 FOR_EACH_BB (bb)
1192 if (bb->flags & BB_SUPERBLOCK)
1194 bb->flags &= ~BB_SUPERBLOCK;
1195 SET_BIT (superblocks, bb->index);
1196 need = true;
1199 if (need)
1201 rebuild_jump_labels (get_insns ());
1202 find_many_sub_basic_blocks (superblocks);
1205 free (superblocks);
1208 /* Finalize the changes: reorder insn list according to the sequence specified
1209 by aux pointers, enter compensation code, rebuild scope forest. */
1211 void
1212 cfg_layout_finalize (void)
1214 #ifdef ENABLE_CHECKING
1215 verify_flow_info ();
1216 #endif
1217 force_one_exit_fallthru ();
1218 rtl_register_cfg_hooks ();
1219 if (reload_completed
1220 #ifdef HAVE_epilogue
1221 && !HAVE_epilogue
1222 #endif
1224 fixup_fallthru_exit_predecessor ();
1225 fixup_reorder_chain ();
1227 rebuild_jump_labels (get_insns ());
1228 delete_dead_jumptables ();
1230 #ifdef ENABLE_CHECKING
1231 verify_insn_chain ();
1232 verify_flow_info ();
1233 #endif
1236 /* Checks whether all N blocks in BBS array can be copied. */
1237 bool
1238 can_copy_bbs_p (basic_block *bbs, unsigned n)
1240 unsigned i;
1241 edge e;
1242 int ret = true;
1244 for (i = 0; i < n; i++)
1245 bbs[i]->flags |= BB_DUPLICATED;
1247 for (i = 0; i < n; i++)
1249 /* If we would need to redirect an abnormal edge during duplication, fail. */
1250 edge_iterator ei;
1251 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1252 if ((e->flags & EDGE_ABNORMAL)
1253 && (e->dest->flags & BB_DUPLICATED))
1255 ret = false;
1256 goto end;
1259 if (!can_duplicate_block_p (bbs[i]))
1261 ret = false;
1262 break;
1266 end:
1267 for (i = 0; i < n; i++)
1268 bbs[i]->flags &= ~BB_DUPLICATED;
1270 return ret;
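/* A standalone sketch of the marking pattern used by can_copy_bbs_p above,
   with hypothetical types: temporarily flag every member of the candidate
   set, reject the set if an "abnormal" edge leads from a member into the
   flagged set, and always clear the flags before returning.  */
#include <stdbool.h>
#include <stdio.h>

#define N_NODES 4

struct edge { int dest; bool abnormal; };
struct node { bool duplicated; struct edge succ; };

static struct node nodes[N_NODES];

static bool
can_copy_set (const int *set, unsigned n)
{
  bool ret = true;
  unsigned i;

  for (i = 0; i < n; i++)
    nodes[set[i]].duplicated = true;     /* Mark the candidate set.  */

  for (i = 0; i < n; i++)
    {
      const struct edge *e = &nodes[set[i]].succ;
      if (e->abnormal && nodes[e->dest].duplicated)
        {
          ret = false;                   /* Abnormal edge into the set: fail.  */
          break;
        }
    }

  for (i = 0; i < n; i++)
    nodes[set[i]].duplicated = false;    /* Always clear the marks again.  */
  return ret;
}

int
main (void)
{
  nodes[0].succ = (struct edge) { 1, false };
  nodes[1].succ = (struct edge) { 2, true };   /* Abnormal edge 1 -> 2.  */
  nodes[2].succ = (struct edge) { 3, false };
  nodes[3].succ = (struct edge) { 0, false };

  int set_a[] = { 0, 1 };
  int set_b[] = { 1, 2 };
  printf ("{0,1}: %s\n", can_copy_set (set_a, 2) ? "ok" : "reject");   /* ok */
  printf ("{1,2}: %s\n", can_copy_set (set_b, 2) ? "ok" : "reject");   /* reject */
  return 0;
}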
1273 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1274 are placed into array NEW_BBS in the same order. Edges from basic blocks
1275 in BBS are also duplicated, and copies of those that lead into BBS are
1276 redirected to the appropriate newly created block. The
1277 function assigns the new bbs into loops (the copy of basic block bb is assigned
1278 to the bb->loop_father->copy loop, so this must be set up correctly in advance)
1279 and updates dominators locally (the structure that contains the information
1280 about dominators is passed to enable this).
1282 BASE is the superloop to which the basic blocks belong; if its header or latch
1283 is copied, we do not set the new blocks as header or latch.
1285 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1286 also in the same order.
1288 Newly created basic blocks are put after the basic block AFTER in the
1289 instruction stream, and the order of the blocks in BBS array is preserved. */
1291 void
1292 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1293 edge *edges, unsigned num_edges, edge *new_edges,
1294 struct loop *base, basic_block after)
1296 unsigned i, j;
1297 basic_block bb, new_bb, dom_bb;
1298 edge e;
1300 /* Duplicate bbs, update dominators, assign bbs to loops. */
1301 for (i = 0; i < n; i++)
1303 /* Duplicate. */
1304 bb = bbs[i];
1305 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1306 after = new_bb;
1307 bb->flags |= BB_DUPLICATED;
1308 /* Possibly set loop header. */
1309 if (bb->loop_father->header == bb && bb->loop_father != base)
1310 new_bb->loop_father->header = new_bb;
1311 /* Or latch. */
1312 if (bb->loop_father->latch == bb && bb->loop_father != base)
1313 new_bb->loop_father->latch = new_bb;
1316 /* Set dominators. */
1317 for (i = 0; i < n; i++)
1319 bb = bbs[i];
1320 new_bb = new_bbs[i];
1322 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1323 if (dom_bb->flags & BB_DUPLICATED)
1325 dom_bb = get_bb_copy (dom_bb);
1326 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1330 /* Redirect edges. */
1331 for (j = 0; j < num_edges; j++)
1332 new_edges[j] = NULL;
1333 for (i = 0; i < n; i++)
1335 edge_iterator ei;
1336 new_bb = new_bbs[i];
1337 bb = bbs[i];
1339 FOR_EACH_EDGE (e, ei, new_bb->succs)
1341 for (j = 0; j < num_edges; j++)
1342 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1343 new_edges[j] = e;
1345 if (!(e->dest->flags & BB_DUPLICATED))
1346 continue;
1347 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1351 /* Clear information about duplicates. */
1352 for (i = 0; i < n; i++)
1353 bbs[i]->flags &= ~BB_DUPLICATED;
1356 #include "gt-cfglayout.h"