/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"
#include "tree-pass.h"
#include "df.h"
#include "vecprim.h"

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer;
rtx cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
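
/* Added descriptive comment: unlink the insns FIRST through LAST, inclusive,
   from the insn chain that currently contains them, repairing the
   neighbours' NEXT/PREV pointers (and the global first/last insn when the
   chain boundary moves), and return FIRST as the head of the now
   free-standing sub-chain.  */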

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
            case NOTE_INSN_BLOCK_END:
              gcc_unreachable ();
              continue;
            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_P (NEXT_INSN (insn))
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_KIND (insn))
          {
          case NOTE_INSN_BLOCK_END:
            gcc_unreachable ();
            break;
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
            unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->il.rtl->header = unlink_insn_chain (next_insn,
                                                PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing the mapping of INSN_LOCATOR into scope
   blocks, line numbers and files.  In order to be GGC friendly we need to
   use separate varrays.  This also slightly improves the memory locality in
   the binary search.  The _locs arrays contain the locators at which the
   given property changes.  block_locators_blocks contains the scope block
   that is used for all insn locators greater than the corresponding
   block_locators_locs value and smaller than the following one.  Similarly
   for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
int prologue_locator;
int epilogue_locator;
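
/* Illustrative example (made up, not from a real compilation): if
   block_locators_locs is {1, 5} and block_locators_blocks is {A, B}, then
   locators 1 through 4 belong to scope block A and locators 5 and above
   belong to scope block B; this is exactly the relation the binary search in
   insn_scope below recovers.  Locator 0 means "no information".  */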

/* Hold the current and last location information, so that the data
   structures are built lazily, only when some instructions in a given
   place are actually needed.  */
location_t curr_location, last_location;
static tree curr_block, last_block;
static int curr_rtl_loc = -1;

/* Allocate the insn locator data structures.  */
void
insn_locators_alloc (void)
{
  prologue_locator = epilogue_locator = 0;

  block_locators_locs = VEC_alloc (int, heap, 32);
  block_locators_blocks = VEC_alloc (tree, gc, 32);
  locations_locators_locs = VEC_alloc (int, heap, 32);
  locations_locators_vals = VEC_alloc (location_t, heap, 32);

  last_location = -1;
  curr_location = -1;
  curr_block = NULL;
  last_block = NULL;
  curr_rtl_loc = 0;
}

/* At the end of emit stage, clear current location.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}

/* Set current location.  */
void
set_curr_insn_source_location (location_t location)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At
     this time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (location == last_location)
    return;
  curr_location = location;
}

/* Set current scope block.  */
void
set_curr_insn_block (tree b)
{
  /* IV opts calls into RTL expansion to compute costs of operations.  At
     this time locators are not initialized.  */
  if (curr_rtl_loc == -1)
    return;
  if (b)
    curr_block = b;
}

/* Return the current insn locator.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
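
/* A rough sketch of the intended calling sequence (hypothetical; the real
   call sites are in the RTL expander, not in this file).  STMT_LOCATION and
   STMT_BLOCK stand for wherever the caller obtains the current statement's
   source position and lexical block:

     insn_locators_alloc ();
     ...
     set_curr_insn_source_location (STMT_LOCATION);
     set_curr_insn_block (STMT_BLOCK);
     INSN_LOCATOR (insn) = curr_insn_locator ();
     ...
     insn_locators_finalize ();
*/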

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
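
/* Added note: the two pass descriptors below wire the helpers above into the
   RTL pass manager as the "into_cfglayout" and "outof_cfglayout" passes,
   which bracket the passes that work on the cfglayout representation.  */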

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};

/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
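
/* Illustrative example (not from the sources): if S1 is block B1 nested in A
   and S2 is block B2 also nested in A, the common ancestor COM found above is
   A, so a NOTE_INSN_BLOCK_END for B1 and a NOTE_INSN_BLOCK_BEG for B2 are
   emitted before ORIG_INSN; blocks at or above A need no notes at all.  */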

/* Return the lexical scope block INSN belongs to.  */
static tree
insn_scope (const_rtx insn)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outermost block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VEC_index (tree, block_locators_blocks, min);
}

/* Return the source location of the statement specified by locator LOC.  */
static location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}

/* Return the source line of the statement specified by locator LOC.  */
int
locator_line (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.line;
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  expanded_location xloc;
  if (!loc)
    return 0;
  else
    xloc = expand_location (locator_location (loc));
  return xloc.file;
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
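
/* Typical (illustrative) use of the accessors above, e.g. from a debugging
   dump, resolving an insn's locator back to its originating file and line:

     fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));
*/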

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between a jump table and its label.  */
      if (JUMP_P (insn)
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This function re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
           bb;
           bb = (basic_block) bb->aux, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (get_bb_original (bb))
            fprintf (dump_file, "duplicate of %i ",
                     get_bb_original (bb)->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
        bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
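
/* A minimal sketch (hypothetical caller) of the convention described above:
   a pass records its desired block order by chaining blocks through bb->aux
   and then asks for the chain to be re-linked, e.g.

     FOR_EACH_BB (bb)
       if (bb->next_bb != EXIT_BLOCK_PTR)
         bb->aux = bb->next_bb;
     relink_block_chain (true);

   Passing false instead additionally clears each block's cfglayout header
   and footer fields, as noted above.  */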

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (bb->il.rtl->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->il.rtl->header;
          else
            set_first_insn (bb->il.rtl->header);
          PREV_INSN (bb->il.rtl->header) = insn;
          insn = bb->il.rtl->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
        {
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerate case of a conditional jump jumping to the
                 next instruction can happen for jumps with side effects.
                 We need to construct a forwarder block and this will be done
                 just fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          nb->il.rtl->visited = 1;
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;

          /* Make sure the new bb is tagged for the correct section (same as
             the fall-thru source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, single_pred (bb));
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && JUMP_P (BB_END (bb))
              && !any_condjump_p (BB_END (bb))
              && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
            add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
        }
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          bb->il.rtl->footer = c->il.rtl->footer;
          c->il.rtl->footer = NULL;
        }

      while (c->aux != bb)
        c = (basic_block) c->aux;

      c->aux = bb->aux;
      while (c->aux)
        c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}

/* In case there is more than one fallthru predecessor of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
        if (predecessor == NULL)
          predecessor = e;
        else
          {
            more = true;
            break;
          }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
        ei_next (&ei);
      else
        redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
        {
          bb->aux = forwarder;
          break;
        }
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);

bool
cfg_layout_can_duplicate_bb_p (const_basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}
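
/* Added descriptive comment for the function below: emit a copy of the insn
   chain FROM..TO (inclusive) at the end of the current insn stream and
   return the first insn of the copy (or NULL if nothing needed to be
   emitted); dispatch tables, labels and notes that must stay unique are
   deliberately not copied.  */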

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
            /* In case the prologue is empty and the function contains a
               label in the first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just a single entry to the function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->il.rtl->header)
    {
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->header, insn);
      if (insn)
        new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
      if (insn)
        new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}

/* Main entry point to this module - initialize the data structures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
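
/* A minimal sketch of how a transformation pass is expected to bracket its
   work with this module (cf. into_cfg_layout_mode and outof_cfg_layout_mode
   above; the middle step is a placeholder for the pass's own work):

     cfg_layout_initialize (0);
     ... reorder or duplicate blocks, recording the desired order
         through the bb->aux chain ...
     cfg_layout_finalize ();
*/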

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if ((e->flags & EDGE_ABNORMAL)
            && (e->dest->flags & BB_DUPLICATED))
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those that lead into BBS are
   redirected to the appropriate newly created block.  The function assigns
   bbs into loops (the copy of basic block bb is assigned to the
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally.

   BASE is the superloop to which these basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the N_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in the BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned num_edges, edge *new_edges,
          struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
        {
          dom_bb = get_bb_copy (dom_bb);
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
        {
          for (j = 0; j < num_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!(e->dest->flags & BB_DUPLICATED))
            continue;
          redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}

#include "gt-cfglayout.h"