/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"
/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
/* Remove the insns between FIRST and LAST (inclusive) from the insn
   chain and return FIRST.  The global first/last insn pointers are
   updated when the removed run touches either end of the chain.  */

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
	fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
						PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer
      = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
/* Data structures representing the mapping of an INSN_LOCATOR into scope
   blocks, line numbers and files.  In order to be GGC friendly we need to
   use separate varrays.  This also slightly improves the memory locality in
   the binary search.  The _locs arrays contain the locators at which the
   given property changes.  The block_locators_blocks array contains the
   scope block that is used for all insn locators greater than the
   corresponding block_locators_locs value and smaller than the following
   one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in a given
   place are needed.  */
location_t curr_location, last_location;
static tree curr_block, last_block;
static int curr_rtl_loc = -1;
/* Allocate insn locator datastructure.  */
void
insn_locators_alloc (void)
{
  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  locations_locators_locs = VEC_alloc (int, heap, 32);
  locations_locators_vals = VEC_alloc (location_t, heap, 32);

  last_location = -1;
  curr_location = -1;
  curr_block = NULL;
  last_block = NULL;
  curr_rtl_loc = 0;
}

/* At the end of emit stage, clear current location.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}

/* Free the insn locator datastructure.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree, gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}

/* Set current location.  */
void
set_curr_insn_source_location (location_t location)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (location == last_location)
    return;
  curr_location = location;
}

/* Set current scope block.  */
void
set_curr_insn_block (tree b)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At this
     time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (b)
    curr_block = b;
}

/* Return current insn locator.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
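
/* Illustrative sketch, not code from this file: during expansion the
   emitter typically drives the locator machinery roughly like

     set_curr_insn_block (gimple_block (stmt));
     set_curr_insn_source_location (gimple_location (stmt));
     INSN_LOCATOR (insn) = curr_insn_locator ();

   so that consecutive insns that share a scope block and source location
   reuse the same locator value.  The exact call sites live in the
   expander, not here.  */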
/* Switch the function's RTL representation into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}

/* Record the current block order in the bb->aux chain and leave
   cfglayout mode again.  */

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Return the lexical scope block that the locator LOC belongs to.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function, and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return the lexical scope block that INSN belongs to.  */
static tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}
/* Return the source location of the statement specified by the locator.  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}

/* Return the source line of the statement specified by the locator LOC.  */
int
locator_line (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.line;
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.file;
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Return true if LOC1 and LOC2 locators have the same location and scope.  */
bool
locator_eq (int loc1, int loc2)
{
  if (loc1 == loc2)
    return true;
  if (locator_location (loc1) != locator_location (loc2))
    return false;
  return locator_scope (loc1) == locator_scope (loc2);
}
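
/* Illustrative sketch, not code from this file: dumping code can map an
   insn back to its source position with the accessors above, e.g.

     if (INSN_P (insn) && insn_line (insn))
       fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));

   which bottoms out in the binary searches over the locator vectors.  */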
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
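
/* Illustrative sketch, not code from this file: a caller that has chosen a
   new block order expresses it by threading bb->aux before asking this
   module to commit it, roughly

     basic_block prev = ENTRY_BLOCK_PTR->next_bb;
     for each block NEXT selected to follow PREV:
       prev->aux = next, prev = next;
     prev->aux = NULL;
     relink_block_chain (/-*stay_in_cfglayout_mode=*-/true);

   outof_cfg_layout_mode above is the degenerate instance of this pattern,
   where the chosen order is simply the existing next_bb order committed via
   cfg_layout_finalize.  */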
/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerate case of a conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		      gcc_assert (could_fall_through
				  (e_taken->src, e_taken->dest));
#endif
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
		  gcc_assert (could_fall_through
			      (e_taken->src, e_taken->dest));
#endif
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-through source, since you cannot fall through across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      basic_block nb;
	      rtx end;

	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      if (e->dest != EXIT_BLOCK_PTR)
		{
		  insn = BB_HEAD (e->dest);
		  end = NEXT_INSN (BB_END (e->dest));
		  while (insn != end && !INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
	    }
      }
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
/* If the exit block has more than one fallthru predecessor, force it to
   have only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);

bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}
/* Create a copy of the insns from FROM to TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  The copied
   chain will be moved into place later.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    break;
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  maybe_copy_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case the prologue is empty and the function contains a
		 label in its first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just a single entry to the function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
			       insn ? get_last_insn () : NULL,
			       EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
	new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
	new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg ().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non-local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
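
/* Illustrative sketch, not code from this file: a pass that wants to
   rearrange blocks typically brackets its work with this module's entry
   and exit points:

     cfg_layout_initialize (0);
     ... transform the CFG, recording the desired order in bb->aux ...
     cfg_layout_finalize ();

   This mirrors what pass_into_cfg_layout_mode and pass_outof_cfg_layout_mode
   above do on behalf of the passes scheduled between them.  */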
/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
	bb->flags &= ~BB_SUPERBLOCK;
	SET_BIT (superblocks, bb->index);
	need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* If we would need to redirect an abnormal edge during duplication,
	 fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
	if ((e->flags & EDGE_ABNORMAL)
	    && (e->dest->flags & BB_DUPLICATED))
	  {
	    ret = false;
	    goto end;
	  }

      if (!can_duplicate_block_p (bbs[i]))
	{
	  ret = false;
	  break;
	}
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated, and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   bbs into loops (a copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally.

   BASE is the superloop to which these basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the NUM_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
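
/* Illustrative sketch, an assumption about typical use (e.g. by loop
   duplication helpers), not code from this file: callers collect the region
   to copy, verify it, and then duplicate it in one call:

     if (can_copy_bbs_p (bbs, n))
       copy_bbs (bbs, n, new_bbs, edges, n_edges, new_edges,
		 loop_outer (loop), after);

   where BBS/EDGES describe the region, NEW_BBS/NEW_EDGES receive the
   copies, and loop_father->copy must already point at the copied loops,
   as the comment above copy_bbs requires.  */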
#include "gt-cfglayout.h"