Enable dumping of alias graphs.
[official-gcc/Ramakrishna.git] / gcc / cfglayout.c
blobca400a8c503e82b530c878a51e172db19026aa29
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer;
45 rtx cfg_layout_function_header;
47 static rtx skip_insns_after_block (basic_block);
48 static void record_effective_endpoints (void);
49 static rtx label_for_bb (basic_block);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx, tree, tree);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree insn_scope (const_rtx);
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
81 static rtx
82 skip_insns_after_block (basic_block bb)
84 rtx insn, last_insn, next_head, prev;
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
92 if (insn == next_head)
93 break;
95 switch (GET_CODE (insn))
97 case BARRIER:
98 last_insn = insn;
99 continue;
101 case NOTE:
102 switch (NOTE_KIND (insn))
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
111 break;
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
117 insn = NEXT_INSN (insn);
118 last_insn = insn;
119 continue;
121 break;
123 default:
124 break;
127 break;
130 /* It is possible to hit contradictory sequence. For instance:
132 jump_insn
133 NOTE_INSN_BLOCK_BEG
134 barrier
136 Where barrier belongs to jump_insn, but the note does not. This can be
137 created by removing the basic block originally following
138 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
140 for (insn = last_insn; insn != BB_END (bb); insn = prev)
142 prev = PREV_INSN (insn);
143 if (NOTE_P (insn))
144 switch (NOTE_KIND (insn))
146 case NOTE_INSN_BLOCK_END:
147 gcc_unreachable ();
148 break;
149 case NOTE_INSN_DELETED:
150 case NOTE_INSN_DELETED_LABEL:
151 continue;
152 default:
153 reorder_insns (insn, insn, last_insn);
157 return last_insn;
160 /* Locate or create a label for a given basic block. */
162 static rtx
163 label_for_bb (basic_block bb)
165 rtx label = BB_HEAD (bb);
167 if (!LABEL_P (label))
169 if (dump_file)
170 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
172 label = block_label (bb);
175 return label;
178 /* Locate the effective beginning and end of the insn chain for each
179 block, as defined by skip_insns_after_block above. */
181 static void
182 record_effective_endpoints (void)
184 rtx next_insn;
185 basic_block bb;
186 rtx insn;
188 for (insn = get_insns ();
189 insn
190 && NOTE_P (insn)
191 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
192 insn = NEXT_INSN (insn))
193 continue;
194 /* No basic blocks at all? */
195 gcc_assert (insn);
197 if (PREV_INSN (insn))
198 cfg_layout_function_header =
199 unlink_insn_chain (get_insns (), PREV_INSN (insn));
200 else
201 cfg_layout_function_header = NULL_RTX;
203 next_insn = get_insns ();
204 FOR_EACH_BB (bb)
206 rtx end;
208 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
209 bb->il.rtl->header = unlink_insn_chain (next_insn,
210 PREV_INSN (BB_HEAD (bb)));
211 end = skip_insns_after_block (bb);
212 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
213 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
214 next_insn = NEXT_INSN (BB_END (bb));
217 cfg_layout_function_footer = next_insn;
218 if (cfg_layout_function_footer)
219 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
222 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
223 numbers and files. In order to be GGC friendly we need to use separate
224 varrays. This also slightly improve the memory locality in binary search.
225 The _locs array contains locators where the given property change. The
226 block_locators_blocks contains the scope block that is used for all insn
227 locator greater than corresponding block_locators_locs value and smaller
228 than the following one. Similarly for the other properties. */
229 static VEC(int,heap) *block_locators_locs;
230 static GTY(()) VEC(tree,gc) *block_locators_blocks;
231 static VEC(int,heap) *locations_locators_locs;
232 DEF_VEC_O(location_t);
233 DEF_VEC_ALLOC_O(location_t,heap);
234 static VEC(location_t,heap) *locations_locators_vals;
235 int prologue_locator;
236 int epilogue_locator;
238 /* Hold current location information and last location information, so the
239 datastructures are built lazily only when some instructions in given
240 place are needed. */
241 static location_t curr_location, last_location;
242 static tree curr_block, last_block;
243 static int curr_rtl_loc = -1;
245 /* Allocate insn locator datastructure. */
246 void
247 insn_locators_alloc (void)
249 prologue_locator = epilogue_locator = 0;
251 block_locators_locs = VEC_alloc (int, heap, 32);
252 block_locators_blocks = VEC_alloc (tree, gc, 32);
253 locations_locators_locs = VEC_alloc (int, heap, 32);
254 locations_locators_vals = VEC_alloc (location_t, heap, 32);
256 last_location = -1;
257 curr_location = -1;
258 curr_block = NULL;
259 last_block = NULL;
260 curr_rtl_loc = 0;
263 /* At the end of emit stage, clear current location. */
264 void
265 insn_locators_finalize (void)
267 if (curr_rtl_loc >= 0)
268 epilogue_locator = curr_insn_locator ();
269 curr_rtl_loc = -1;
272 /* Allocate insn locator datastructure. */
273 void
274 insn_locators_free (void)
276 prologue_locator = epilogue_locator = 0;
278 VEC_free (int, heap, block_locators_locs);
279 VEC_free (tree,gc, block_locators_blocks);
280 VEC_free (int, heap, locations_locators_locs);
281 VEC_free (location_t, heap, locations_locators_vals);
285 /* Set current location. */
286 void
287 set_curr_insn_source_location (location_t location)
289 /* IV opts calls into RTL expansion to compute costs of operations. At this
290 time locators are not initialized. */
291 if (curr_rtl_loc == -1)
292 return;
293 curr_location = location;
296 /* Get current location. */
297 location_t
298 get_curr_insn_source_location (void)
300 return curr_location;
303 /* Set current scope block. */
304 void
305 set_curr_insn_block (tree b)
307 /* IV opts calls into RTL expansion to compute costs of operations. At this
308 time locators are not initialized. */
309 if (curr_rtl_loc == -1)
310 return;
311 if (b)
312 curr_block = b;
315 /* Get current scope block. */
316 tree
317 get_curr_insn_block (void)
319 return curr_block;
322 /* Return current insn locator. */
324 curr_insn_locator (void)
326 if (curr_rtl_loc == -1)
327 return 0;
328 if (last_block != curr_block)
330 curr_rtl_loc++;
331 VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
332 VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
333 last_block = curr_block;
335 if (last_location != curr_location)
337 curr_rtl_loc++;
338 VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
339 VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
340 last_location = curr_location;
342 return curr_rtl_loc;
/* Pass execute function: switch the CFG into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
352 static unsigned int
353 outof_cfg_layout_mode (void)
355 basic_block bb;
357 FOR_EACH_BB (bb)
358 if (bb->next_bb != EXIT_BLOCK_PTR)
359 bb->aux = bb->next_bb;
361 cfg_layout_finalize ();
363 return 0;
366 struct rtl_opt_pass pass_into_cfg_layout_mode =
369 RTL_PASS,
370 "into_cfglayout", /* name */
371 NULL, /* gate */
372 into_cfg_layout_mode, /* execute */
373 NULL, /* sub */
374 NULL, /* next */
375 0, /* static_pass_number */
376 TV_NONE, /* tv_id */
377 0, /* properties_required */
378 PROP_cfglayout, /* properties_provided */
379 0, /* properties_destroyed */
380 0, /* todo_flags_start */
381 TODO_dump_func, /* todo_flags_finish */
385 struct rtl_opt_pass pass_outof_cfg_layout_mode =
388 RTL_PASS,
389 "outof_cfglayout", /* name */
390 NULL, /* gate */
391 outof_cfg_layout_mode, /* execute */
392 NULL, /* sub */
393 NULL, /* next */
394 0, /* static_pass_number */
395 TV_NONE, /* tv_id */
396 0, /* properties_required */
397 0, /* properties_provided */
398 PROP_cfglayout, /* properties_destroyed */
399 0, /* todo_flags_start */
400 TODO_dump_func, /* todo_flags_finish */
404 /* Return scope resulting from combination of S1 and S2. */
405 static tree
406 choose_inner_scope (tree s1, tree s2)
408 if (!s1)
409 return s2;
410 if (!s2)
411 return s1;
412 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
413 return s1;
414 return s2;
417 /* Emit lexical block notes needed to change scope from S1 to S2. */
419 static void
420 change_scope (rtx orig_insn, tree s1, tree s2)
422 rtx insn = orig_insn;
423 tree com = NULL_TREE;
424 tree ts1 = s1, ts2 = s2;
425 tree s;
427 while (ts1 != ts2)
429 gcc_assert (ts1 && ts2);
430 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
431 ts1 = BLOCK_SUPERCONTEXT (ts1);
432 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
433 ts2 = BLOCK_SUPERCONTEXT (ts2);
434 else
436 ts1 = BLOCK_SUPERCONTEXT (ts1);
437 ts2 = BLOCK_SUPERCONTEXT (ts2);
440 com = ts1;
442 /* Close scopes. */
443 s = s1;
444 while (s != com)
446 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
447 NOTE_BLOCK (note) = s;
448 s = BLOCK_SUPERCONTEXT (s);
451 /* Open scopes. */
452 s = s2;
453 while (s != com)
455 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
456 NOTE_BLOCK (insn) = s;
457 s = BLOCK_SUPERCONTEXT (s);
461 /* Return lexical scope block locator belongs to. */
462 static tree
463 locator_scope (int loc)
465 int max = VEC_length (int, block_locators_locs);
466 int min = 0;
468 /* When block_locators_locs was initialized, the pro- and epilogue
469 insns didn't exist yet and can therefore not be found this way.
470 But we know that they belong to the outer most block of the
471 current function.
472 Without this test, the prologue would be put inside the block of
473 the first valid instruction in the function and when that first
474 insn is part of an inlined function then the low_pc of that
475 inlined function is messed up. Likewise for the epilogue and
476 the last valid instruction. */
477 if (loc == prologue_locator || loc == epilogue_locator)
478 return DECL_INITIAL (cfun->decl);
480 if (!max || !loc)
481 return NULL;
482 while (1)
484 int pos = (min + max) / 2;
485 int tmp = VEC_index (int, block_locators_locs, pos);
487 if (tmp <= loc && min != pos)
488 min = pos;
489 else if (tmp > loc && max != pos)
490 max = pos;
491 else
493 min = pos;
494 break;
497 return VEC_index (tree, block_locators_blocks, min);
500 /* Return lexical scope block insn belongs to. */
501 static tree
502 insn_scope (const_rtx insn)
504 return locator_scope (INSN_LOCATOR (insn));
507 /* Return line number of the statement specified by the locator. */
508 location_t
509 locator_location (int loc)
511 int max = VEC_length (int, locations_locators_locs);
512 int min = 0;
514 while (1)
516 int pos = (min + max) / 2;
517 int tmp = VEC_index (int, locations_locators_locs, pos);
519 if (tmp <= loc && min != pos)
520 min = pos;
521 else if (tmp > loc && max != pos)
522 max = pos;
523 else
525 min = pos;
526 break;
529 return *VEC_index (location_t, locations_locators_vals, min);
532 /* Return source line of the statement that produced this insn. */
534 locator_line (int loc)
536 expanded_location xloc;
537 if (!loc)
538 return 0;
539 else
540 xloc = expand_location (locator_location (loc));
541 return xloc.line;
544 /* Return line number of the statement that produced this insn. */
546 insn_line (const_rtx insn)
548 return locator_line (INSN_LOCATOR (insn));
551 /* Return source file of the statement specified by LOC. */
552 const char *
553 locator_file (int loc)
555 expanded_location xloc;
556 if (!loc)
557 return 0;
558 else
559 xloc = expand_location (locator_location (loc));
560 return xloc.file;
563 /* Return source file of the statement that produced this insn. */
564 const char *
565 insn_file (const_rtx insn)
567 return locator_file (INSN_LOCATOR (insn));
570 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
571 bool
572 locator_eq (int loc1, int loc2)
574 if (loc1 == loc2)
575 return true;
576 if (locator_location (loc1) != locator_location (loc2))
577 return false;
578 return locator_scope (loc1) == locator_scope (loc2);
581 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
582 on the scope tree and the newly reordered instructions. */
584 void
585 reemit_insn_block_notes (void)
587 tree cur_block = DECL_INITIAL (cfun->decl);
588 rtx insn, note;
590 insn = get_insns ();
591 if (!active_insn_p (insn))
592 insn = next_active_insn (insn);
593 for (; insn; insn = next_active_insn (insn))
595 tree this_block;
597 /* Avoid putting scope notes between jump table and its label. */
598 if (JUMP_TABLE_DATA_P (insn))
599 continue;
601 this_block = insn_scope (insn);
602 /* For sequences compute scope resulting from merging all scopes
603 of instructions nested inside. */
604 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
606 int i;
607 rtx body = PATTERN (insn);
609 this_block = NULL;
610 for (i = 0; i < XVECLEN (body, 0); i++)
611 this_block = choose_inner_scope (this_block,
612 insn_scope (XVECEXP (body, 0, i)));
614 if (! this_block)
615 continue;
617 if (this_block != cur_block)
619 change_scope (insn, cur_block, this_block);
620 cur_block = this_block;
624 /* change_scope emits before the insn, not after. */
625 note = emit_note (NOTE_INSN_DELETED);
626 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
627 delete_insn (note);
629 reorder_blocks ();
633 /* Link the basic blocks in the correct order, compacting the basic
634 block queue while at it. This also clears the visited flag on
635 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
636 also clears the basic block header and footer fields.
638 This function is usually called after a pass (e.g. tracer) finishes
639 some transformations while in cfglayout mode. The required sequence
640 of the basic blocks is in a linked list along the bb->aux field.
641 This functions re-links the basic block prev_bb and next_bb pointers
642 accordingly, and it compacts and renumbers the blocks. */
644 void
645 relink_block_chain (bool stay_in_cfglayout_mode)
647 basic_block bb, prev_bb;
648 int index;
650 /* Maybe dump the re-ordered sequence. */
651 if (dump_file)
653 fprintf (dump_file, "Reordered sequence:\n");
654 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
656 bb = (basic_block) bb->aux, index++)
658 fprintf (dump_file, " %i ", index);
659 if (get_bb_original (bb))
660 fprintf (dump_file, "duplicate of %i ",
661 get_bb_original (bb)->index);
662 else if (forwarder_block_p (bb)
663 && !LABEL_P (BB_HEAD (bb)))
664 fprintf (dump_file, "compensation ");
665 else
666 fprintf (dump_file, "bb %i ", bb->index);
667 fprintf (dump_file, " [%i]\n", bb->frequency);
671 /* Now reorder the blocks. */
672 prev_bb = ENTRY_BLOCK_PTR;
673 bb = ENTRY_BLOCK_PTR->next_bb;
674 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
676 bb->prev_bb = prev_bb;
677 prev_bb->next_bb = bb;
679 prev_bb->next_bb = EXIT_BLOCK_PTR;
680 EXIT_BLOCK_PTR->prev_bb = prev_bb;
682 /* Then, clean up the aux and visited fields. */
683 FOR_ALL_BB (bb)
685 bb->aux = NULL;
686 bb->il.rtl->visited = 0;
687 if (!stay_in_cfglayout_mode)
688 bb->il.rtl->header = bb->il.rtl->footer = NULL;
691 /* Maybe reset the original copy tables, they are not valid anymore
692 when we renumber the basic blocks in compact_blocks. If we are
693 are going out of cfglayout mode, don't re-allocate the tables. */
694 free_original_copy_tables ();
695 if (stay_in_cfglayout_mode)
696 initialize_original_copy_tables ();
698 /* Finally, put basic_block_info in the new order. */
699 compact_blocks ();
703 /* Given a reorder chain, rearrange the code to match. */
705 static void
706 fixup_reorder_chain (void)
708 basic_block bb;
709 rtx insn = NULL;
711 if (cfg_layout_function_header)
713 set_first_insn (cfg_layout_function_header);
714 insn = cfg_layout_function_header;
715 while (NEXT_INSN (insn))
716 insn = NEXT_INSN (insn);
719 /* First do the bulk reordering -- rechain the blocks without regard to
720 the needed changes to jumps and labels. */
722 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
724 if (bb->il.rtl->header)
726 if (insn)
727 NEXT_INSN (insn) = bb->il.rtl->header;
728 else
729 set_first_insn (bb->il.rtl->header);
730 PREV_INSN (bb->il.rtl->header) = insn;
731 insn = bb->il.rtl->header;
732 while (NEXT_INSN (insn))
733 insn = NEXT_INSN (insn);
735 if (insn)
736 NEXT_INSN (insn) = BB_HEAD (bb);
737 else
738 set_first_insn (BB_HEAD (bb));
739 PREV_INSN (BB_HEAD (bb)) = insn;
740 insn = BB_END (bb);
741 if (bb->il.rtl->footer)
743 NEXT_INSN (insn) = bb->il.rtl->footer;
744 PREV_INSN (bb->il.rtl->footer) = insn;
745 while (NEXT_INSN (insn))
746 insn = NEXT_INSN (insn);
750 NEXT_INSN (insn) = cfg_layout_function_footer;
751 if (cfg_layout_function_footer)
752 PREV_INSN (cfg_layout_function_footer) = insn;
754 while (NEXT_INSN (insn))
755 insn = NEXT_INSN (insn);
757 set_last_insn (insn);
758 #ifdef ENABLE_CHECKING
759 verify_insn_chain ();
760 #endif
762 /* Now add jumps and labels as needed to match the blocks new
763 outgoing edges. */
765 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
767 edge e_fall, e_taken, e;
768 rtx bb_end_insn;
769 basic_block nb;
770 edge_iterator ei;
772 if (EDGE_COUNT (bb->succs) == 0)
773 continue;
775 /* Find the old fallthru edge, and another non-EH edge for
776 a taken jump. */
777 e_taken = e_fall = NULL;
779 FOR_EACH_EDGE (e, ei, bb->succs)
780 if (e->flags & EDGE_FALLTHRU)
781 e_fall = e;
782 else if (! (e->flags & EDGE_EH))
783 e_taken = e;
785 bb_end_insn = BB_END (bb);
786 if (JUMP_P (bb_end_insn))
788 if (any_condjump_p (bb_end_insn))
790 /* If the old fallthru is still next, nothing to do. */
791 if (bb->aux == e_fall->dest
792 || e_fall->dest == EXIT_BLOCK_PTR)
793 continue;
795 /* The degenerated case of conditional jump jumping to the next
796 instruction can happen for jumps with side effects. We need
797 to construct a forwarder block and this will be done just
798 fine by force_nonfallthru below. */
799 if (!e_taken)
802 /* There is another special case: if *neither* block is next,
803 such as happens at the very end of a function, then we'll
804 need to add a new unconditional jump. Choose the taken
805 edge based on known or assumed probability. */
806 else if (bb->aux != e_taken->dest)
808 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
810 if (note
811 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
812 && invert_jump (bb_end_insn,
813 (e_fall->dest == EXIT_BLOCK_PTR
814 ? NULL_RTX
815 : label_for_bb (e_fall->dest)), 0))
817 e_fall->flags &= ~EDGE_FALLTHRU;
818 #ifdef ENABLE_CHECKING
819 gcc_assert (could_fall_through
820 (e_taken->src, e_taken->dest));
821 #endif
822 e_taken->flags |= EDGE_FALLTHRU;
823 update_br_prob_note (bb);
824 e = e_fall, e_fall = e_taken, e_taken = e;
828 /* If the "jumping" edge is a crossing edge, and the fall
829 through edge is non-crossing, leave things as they are. */
830 else if ((e_taken->flags & EDGE_CROSSING)
831 && !(e_fall->flags & EDGE_CROSSING))
832 continue;
834 /* Otherwise we can try to invert the jump. This will
835 basically never fail, however, keep up the pretense. */
836 else if (invert_jump (bb_end_insn,
837 (e_fall->dest == EXIT_BLOCK_PTR
838 ? NULL_RTX
839 : label_for_bb (e_fall->dest)), 0))
841 e_fall->flags &= ~EDGE_FALLTHRU;
842 #ifdef ENABLE_CHECKING
843 gcc_assert (could_fall_through
844 (e_taken->src, e_taken->dest));
845 #endif
846 e_taken->flags |= EDGE_FALLTHRU;
847 update_br_prob_note (bb);
848 continue;
851 else
853 /* Otherwise we have some return, switch or computed
854 jump. In the 99% case, there should not have been a
855 fallthru edge. */
856 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
857 continue;
860 else
862 /* No fallthru implies a noreturn function with EH edges, or
863 something similarly bizarre. In any case, we don't need to
864 do anything. */
865 if (! e_fall)
866 continue;
868 /* If the fallthru block is still next, nothing to do. */
869 if (bb->aux == e_fall->dest)
870 continue;
872 /* A fallthru to exit block. */
873 if (e_fall->dest == EXIT_BLOCK_PTR)
874 continue;
877 /* We got here if we need to add a new jump insn. */
878 nb = force_nonfallthru (e_fall);
879 if (nb)
881 nb->il.rtl->visited = 1;
882 nb->aux = bb->aux;
883 bb->aux = nb;
884 /* Don't process this new block. */
885 bb = nb;
887 /* Make sure new bb is tagged for correct section (same as
888 fall-thru source, since you cannot fall-throu across
889 section boundaries). */
890 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
891 if (flag_reorder_blocks_and_partition
892 && targetm.have_named_sections
893 && JUMP_P (BB_END (bb))
894 && !any_condjump_p (BB_END (bb))
895 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
896 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
900 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
902 /* Annoying special case - jump around dead jumptables left in the code. */
903 FOR_EACH_BB (bb)
905 edge e;
906 edge_iterator ei;
908 FOR_EACH_EDGE (e, ei, bb->succs)
909 if (e->flags & EDGE_FALLTHRU)
910 break;
912 if (e && !can_fallthru (e->src, e->dest))
913 force_nonfallthru (e);
916 /* Ensure goto_locus from edges has some instructions with that locus
917 in RTL. */
918 if (!optimize)
919 FOR_EACH_BB (bb)
921 edge e;
922 edge_iterator ei;
924 FOR_EACH_EDGE (e, ei, bb->succs)
925 if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
927 basic_block nb;
928 rtx end;
930 insn = BB_END (e->src);
931 end = PREV_INSN (BB_HEAD (e->src));
932 while (insn != end
933 && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
934 insn = PREV_INSN (insn);
935 if (insn != end
936 && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
937 continue;
938 if (simplejump_p (BB_END (e->src))
939 && INSN_LOCATOR (BB_END (e->src)) == 0)
941 INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
942 continue;
944 if (e->dest != EXIT_BLOCK_PTR)
946 insn = BB_HEAD (e->dest);
947 end = NEXT_INSN (BB_END (e->dest));
948 while (insn != end && !INSN_P (insn))
949 insn = NEXT_INSN (insn);
950 if (insn != end && INSN_LOCATOR (insn)
951 && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
952 continue;
954 nb = split_edge (e);
955 if (!INSN_P (BB_END (nb)))
956 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
957 nb);
958 INSN_LOCATOR (BB_END (nb)) = e->goto_locus;
963 /* Perform sanity checks on the insn chain.
964 1. Check that next/prev pointers are consistent in both the forward and
965 reverse direction.
966 2. Count insns in chain, going both directions, and check if equal.
967 3. Check that get_last_insn () returns the actual end of chain. */
969 void
970 verify_insn_chain (void)
972 rtx x, prevx, nextx;
973 int insn_cnt1, insn_cnt2;
975 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
976 x != 0;
977 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
978 gcc_assert (PREV_INSN (x) == prevx);
980 gcc_assert (prevx == get_last_insn ());
982 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
983 x != 0;
984 nextx = x, insn_cnt2++, x = PREV_INSN (x))
985 gcc_assert (NEXT_INSN (x) == nextx);
987 gcc_assert (insn_cnt1 == insn_cnt2);
990 /* If we have assembler epilogues, the block falling through to exit must
991 be the last one in the reordered chain when we reach final. Ensure
992 that this condition is met. */
993 static void
994 fixup_fallthru_exit_predecessor (void)
996 edge e;
997 edge_iterator ei;
998 basic_block bb = NULL;
1000 /* This transformation is not valid before reload, because we might
1001 separate a call from the instruction that copies the return
1002 value. */
1003 gcc_assert (reload_completed);
1005 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1006 if (e->flags & EDGE_FALLTHRU)
1007 bb = e->src;
1009 if (bb && bb->aux)
1011 basic_block c = ENTRY_BLOCK_PTR->next_bb;
1013 /* If the very first block is the one with the fall-through exit
1014 edge, we have to split that block. */
1015 if (c == bb)
1017 bb = split_block (bb, NULL)->dest;
1018 bb->aux = c->aux;
1019 c->aux = bb;
1020 bb->il.rtl->footer = c->il.rtl->footer;
1021 c->il.rtl->footer = NULL;
1024 while (c->aux != bb)
1025 c = (basic_block) c->aux;
1027 c->aux = bb->aux;
1028 while (c->aux)
1029 c = (basic_block) c->aux;
1031 c->aux = bb;
1032 bb->aux = NULL;
1036 /* In case there are more than one fallthru predecessors of exit, force that
1037 there is only one. */
1039 static void
1040 force_one_exit_fallthru (void)
1042 edge e, predecessor = NULL;
1043 bool more = false;
1044 edge_iterator ei;
1045 basic_block forwarder, bb;
1047 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1048 if (e->flags & EDGE_FALLTHRU)
1050 if (predecessor == NULL)
1051 predecessor = e;
1052 else
1054 more = true;
1055 break;
1059 if (!more)
1060 return;
1062 /* Exit has several fallthru predecessors. Create a forwarder block for
1063 them. */
1064 forwarder = split_edge (predecessor);
1065 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
1067 if (e->src == forwarder
1068 || !(e->flags & EDGE_FALLTHRU))
1069 ei_next (&ei);
1070 else
1071 redirect_edge_and_branch_force (e, forwarder);
1074 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
1075 exit block. */
1076 FOR_EACH_BB (bb)
1078 if (bb->aux == NULL && bb != forwarder)
1080 bb->aux = forwarder;
1081 break;
1086 /* Return true in case it is possible to duplicate the basic block BB. */
1088 /* We do not want to declare the function in a header file, since it should
1089 only be used through the cfghooks interface, and we do not want to move
1090 it to cfgrtl.c since it would require also moving quite a lot of related
1091 code. */
1092 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1094 bool
1095 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1097 /* Do not attempt to duplicate tablejumps, as we need to unshare
1098 the dispatch table. This is difficult to do, as the instructions
1099 computing jump destination may be hoisted outside the basic block. */
1100 if (tablejump_p (BB_END (bb), NULL, NULL))
1101 return false;
1103 /* Do not duplicate blocks containing insns that can't be copied. */
1104 if (targetm.cannot_copy_insn_p)
1106 rtx insn = BB_HEAD (bb);
1107 while (1)
1109 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1110 return false;
1111 if (insn == BB_END (bb))
1112 break;
1113 insn = NEXT_INSN (insn);
1117 return true;
1121 duplicate_insn_chain (rtx from, rtx to)
1123 rtx insn, last, copy;
1125 /* Avoid updating of boundaries of previous basic block. The
1126 note will get removed from insn stream in fixup. */
1127 last = emit_note (NOTE_INSN_DELETED);
1129 /* Create copy at the end of INSN chain. The chain will
1130 be reordered later. */
1131 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1133 switch (GET_CODE (insn))
1135 case DEBUG_INSN:
1136 case INSN:
1137 case CALL_INSN:
1138 case JUMP_INSN:
1139 /* Avoid copying of dispatch tables. We never duplicate
1140 tablejumps, so this can hit only in case the table got
1141 moved far from original jump. */
1142 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1143 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1144 break;
1145 copy = emit_copy_of_insn_after (insn, get_last_insn ());
1146 maybe_copy_epilogue_insn (insn, copy);
1147 break;
1149 case CODE_LABEL:
1150 break;
1152 case BARRIER:
1153 emit_barrier ();
1154 break;
1156 case NOTE:
1157 switch (NOTE_KIND (insn))
1159 /* In case prologue is empty and function contain label
1160 in first BB, we may want to copy the block. */
1161 case NOTE_INSN_PROLOGUE_END:
1163 case NOTE_INSN_DELETED:
1164 case NOTE_INSN_DELETED_LABEL:
1165 /* No problem to strip these. */
1166 case NOTE_INSN_FUNCTION_BEG:
1167 /* There is always just single entry to function. */
1168 case NOTE_INSN_BASIC_BLOCK:
1169 break;
1171 case NOTE_INSN_EPILOGUE_BEG:
1172 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1173 emit_note_copy (insn);
1174 break;
1176 default:
1177 /* All other notes should have already been eliminated. */
1178 gcc_unreachable ();
1180 break;
1181 default:
1182 gcc_unreachable ();
1185 insn = NEXT_INSN (last);
1186 delete_insn (last);
1187 return insn;
1189 /* Create a duplicate of the basic block BB. */
1191 /* We do not want to declare the function in a header file, since it should
1192 only be used through the cfghooks interface, and we do not want to move
1193 it to cfgrtl.c since it would require also moving quite a lot of related
1194 code. */
1195 extern basic_block cfg_layout_duplicate_bb (basic_block);
1197 basic_block
1198 cfg_layout_duplicate_bb (basic_block bb)
1200 rtx insn;
1201 basic_block new_bb;
1203 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1204 new_bb = create_basic_block (insn,
1205 insn ? get_last_insn () : NULL,
1206 EXIT_BLOCK_PTR->prev_bb);
1208 BB_COPY_PARTITION (new_bb, bb);
1209 if (bb->il.rtl->header)
1211 insn = bb->il.rtl->header;
1212 while (NEXT_INSN (insn))
1213 insn = NEXT_INSN (insn);
1214 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1215 if (insn)
1216 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1219 if (bb->il.rtl->footer)
1221 insn = bb->il.rtl->footer;
1222 while (NEXT_INSN (insn))
1223 insn = NEXT_INSN (insn);
1224 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1225 if (insn)
1226 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1229 return new_bb;
1233 /* Main entry point to this module - initialize the datastructures for
1234 CFG layout changes. It keeps LOOPS up-to-date if not null.
1236 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1238 void
1239 cfg_layout_initialize (unsigned int flags)
1241 rtx x;
1242 basic_block bb;
1244 initialize_original_copy_tables ();
1246 cfg_layout_rtl_register_cfg_hooks ();
1248 record_effective_endpoints ();
1250 /* Make sure that the targets of non local gotos are marked. */
1251 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1253 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1254 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1257 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1260 /* Splits superblocks. */
1261 void
1262 break_superblocks (void)
1264 sbitmap superblocks;
1265 bool need = false;
1266 basic_block bb;
1268 superblocks = sbitmap_alloc (last_basic_block);
1269 sbitmap_zero (superblocks);
1271 FOR_EACH_BB (bb)
1272 if (bb->flags & BB_SUPERBLOCK)
1274 bb->flags &= ~BB_SUPERBLOCK;
1275 SET_BIT (superblocks, bb->index);
1276 need = true;
1279 if (need)
1281 rebuild_jump_labels (get_insns ());
1282 find_many_sub_basic_blocks (superblocks);
1285 free (superblocks);
1288 /* Finalize the changes: reorder insn list according to the sequence specified
1289 by aux pointers, enter compensation code, rebuild scope forest. */
1291 void
1292 cfg_layout_finalize (void)
1294 #ifdef ENABLE_CHECKING
1295 verify_flow_info ();
1296 #endif
1297 force_one_exit_fallthru ();
1298 rtl_register_cfg_hooks ();
1299 if (reload_completed
1300 #ifdef HAVE_epilogue
1301 && !HAVE_epilogue
1302 #endif
1304 fixup_fallthru_exit_predecessor ();
1305 fixup_reorder_chain ();
1307 rebuild_jump_labels (get_insns ());
1308 delete_dead_jumptables ();
1310 #ifdef ENABLE_CHECKING
1311 verify_insn_chain ();
1312 verify_flow_info ();
1313 #endif
1316 /* Checks whether all N blocks in BBS array can be copied. */
1317 bool
1318 can_copy_bbs_p (basic_block *bbs, unsigned n)
1320 unsigned i;
1321 edge e;
1322 int ret = true;
1324 for (i = 0; i < n; i++)
1325 bbs[i]->flags |= BB_DUPLICATED;
1327 for (i = 0; i < n; i++)
1329 /* In case we should redirect abnormal edge during duplication, fail. */
1330 edge_iterator ei;
1331 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1332 if ((e->flags & EDGE_ABNORMAL)
1333 && (e->dest->flags & BB_DUPLICATED))
1335 ret = false;
1336 goto end;
1339 if (!can_duplicate_block_p (bbs[i]))
1341 ret = false;
1342 break;
1346 end:
1347 for (i = 0; i < n; i++)
1348 bbs[i]->flags &= ~BB_DUPLICATED;
1350 return ret;
1353 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1354 are placed into array NEW_BBS in the same order. Edges from basic blocks
1355 in BBS are also duplicated and copies of those of them
1356 that lead into BBS are redirected to appropriate newly created block. The
1357 function assigns bbs into loops (copy of basic block bb is assigned to
1358 bb->loop_father->copy loop, so this must be set up correctly in advance)
1359 and updates dominators locally (LOOPS structure that contains the information
1360 about dominators is passed to enable this).
1362 BASE is the superloop to that basic block belongs; if its header or latch
1363 is copied, we do not set the new blocks as header or latch.
1365 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1366 also in the same order.
1368 Newly created basic blocks are put after the basic block AFTER in the
1369 instruction stream, and the order of the blocks in BBS array is preserved. */
1371 void
1372 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1373 edge *edges, unsigned num_edges, edge *new_edges,
1374 struct loop *base, basic_block after)
1376 unsigned i, j;
1377 basic_block bb, new_bb, dom_bb;
1378 edge e;
1380 /* Duplicate bbs, update dominators, assign bbs to loops. */
1381 for (i = 0; i < n; i++)
1383 /* Duplicate. */
1384 bb = bbs[i];
1385 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1386 after = new_bb;
1387 bb->flags |= BB_DUPLICATED;
1388 /* Possibly set loop header. */
1389 if (bb->loop_father->header == bb && bb->loop_father != base)
1390 new_bb->loop_father->header = new_bb;
1391 /* Or latch. */
1392 if (bb->loop_father->latch == bb && bb->loop_father != base)
1393 new_bb->loop_father->latch = new_bb;
1396 /* Set dominators. */
1397 for (i = 0; i < n; i++)
1399 bb = bbs[i];
1400 new_bb = new_bbs[i];
1402 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1403 if (dom_bb->flags & BB_DUPLICATED)
1405 dom_bb = get_bb_copy (dom_bb);
1406 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1410 /* Redirect edges. */
1411 for (j = 0; j < num_edges; j++)
1412 new_edges[j] = NULL;
1413 for (i = 0; i < n; i++)
1415 edge_iterator ei;
1416 new_bb = new_bbs[i];
1417 bb = bbs[i];
1419 FOR_EACH_EDGE (e, ei, new_bb->succs)
1421 for (j = 0; j < num_edges; j++)
1422 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1423 new_edges[j] = e;
1425 if (!(e->dest->flags & BB_DUPLICATED))
1426 continue;
1427 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1431 /* Clear information about duplicates. */
1432 for (i = 0; i < n; i++)
1433 bbs[i]->flags &= ~BB_DUPLICATED;
1436 #include "gt-cfglayout.h"