simplify-rtx.c (simplify_unary_operation_1): Extend the handling of SUBREG_PROMOTED_V...
[official-gcc.git] / gcc / cfglayout.c
blob5387b3882601fb8134df6539f6351a3bb114a902
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
43 /* Holds the interesting trailing notes for the function. */
44 rtx cfg_layout_function_footer;
45 rtx cfg_layout_function_header;
47 static rtx skip_insns_after_block (basic_block);
48 static void record_effective_endpoints (void);
49 static rtx label_for_bb (basic_block);
50 static void fixup_reorder_chain (void);
52 static void change_scope (rtx, tree, tree);
54 void verify_insn_chain (void);
55 static void fixup_fallthru_exit_predecessor (void);
56 static tree insn_scope (const_rtx);
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
81 static rtx
82 skip_insns_after_block (basic_block bb)
84 rtx insn, last_insn, next_head, prev;
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
92 if (insn == next_head)
93 break;
95 switch (GET_CODE (insn))
97 case BARRIER:
98 last_insn = insn;
99 continue;
101 case NOTE:
102 switch (NOTE_KIND (insn))
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
111 break;
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_P (NEXT_INSN (insn))
116 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
117 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
119 insn = NEXT_INSN (insn);
120 last_insn = insn;
121 continue;
123 break;
125 default:
126 break;
129 break;
132 /* It is possible to hit contradictory sequence. For instance:
134 jump_insn
135 NOTE_INSN_BLOCK_BEG
136 barrier
138 Where barrier belongs to jump_insn, but the note does not. This can be
139 created by removing the basic block originally following
140 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
142 for (insn = last_insn; insn != BB_END (bb); insn = prev)
144 prev = PREV_INSN (insn);
145 if (NOTE_P (insn))
146 switch (NOTE_KIND (insn))
148 case NOTE_INSN_BLOCK_END:
149 gcc_unreachable ();
150 break;
151 case NOTE_INSN_DELETED:
152 case NOTE_INSN_DELETED_LABEL:
153 continue;
154 default:
155 reorder_insns (insn, insn, last_insn);
159 return last_insn;
162 /* Locate or create a label for a given basic block. */
164 static rtx
165 label_for_bb (basic_block bb)
167 rtx label = BB_HEAD (bb);
169 if (!LABEL_P (label))
171 if (dump_file)
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
174 label = block_label (bb);
177 return label;
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
183 static void
184 record_effective_endpoints (void)
186 rtx next_insn;
187 basic_block bb;
188 rtx insn;
190 for (insn = get_insns ();
191 insn
192 && NOTE_P (insn)
193 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
194 insn = NEXT_INSN (insn))
195 continue;
196 /* No basic blocks at all? */
197 gcc_assert (insn);
199 if (PREV_INSN (insn))
200 cfg_layout_function_header =
201 unlink_insn_chain (get_insns (), PREV_INSN (insn));
202 else
203 cfg_layout_function_header = NULL_RTX;
205 next_insn = get_insns ();
206 FOR_EACH_BB (bb)
208 rtx end;
210 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
211 bb->il.rtl->header = unlink_insn_chain (next_insn,
212 PREV_INSN (BB_HEAD (bb)));
213 end = skip_insns_after_block (bb);
214 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
215 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
216 next_insn = NEXT_INSN (BB_END (bb));
219 cfg_layout_function_footer = next_insn;
220 if (cfg_layout_function_footer)
221 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
224 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
225 numbers and files. In order to be GGC friendly we need to use separate
226 varrays. This also slightly improve the memory locality in binary search.
227 The _locs array contains locators where the given property change. The
228 block_locators_blocks contains the scope block that is used for all insn
229 locator greater than corresponding block_locators_locs value and smaller
230 than the following one. Similarly for the other properties. */
231 static VEC(int,heap) *block_locators_locs;
232 static GTY(()) VEC(tree,gc) *block_locators_blocks;
233 static VEC(int,heap) *locations_locators_locs;
234 DEF_VEC_O(location_t);
235 DEF_VEC_ALLOC_O(location_t,heap);
236 static VEC(location_t,heap) *locations_locators_vals;
237 int prologue_locator;
238 int epilogue_locator;
240 /* Hold current location information and last location information, so the
241 datastructures are built lazily only when some instructions in given
242 place are needed. */
243 location_t curr_location, last_location;
244 static tree curr_block, last_block;
245 static int curr_rtl_loc = -1;
247 /* Allocate insn locator datastructure. */
248 void
249 insn_locators_alloc (void)
251 prologue_locator = epilogue_locator = 0;
253 block_locators_locs = VEC_alloc (int, heap, 32);
254 block_locators_blocks = VEC_alloc (tree, gc, 32);
255 locations_locators_locs = VEC_alloc (int, heap, 32);
256 locations_locators_vals = VEC_alloc (location_t, heap, 32);
258 last_location = -1;
259 curr_location = -1;
260 curr_block = NULL;
261 last_block = NULL;
262 curr_rtl_loc = 0;
265 /* At the end of emit stage, clear current location. */
266 void
267 insn_locators_finalize (void)
269 if (curr_rtl_loc >= 0)
270 epilogue_locator = curr_insn_locator ();
271 curr_rtl_loc = -1;
274 /* Set current location. */
275 void
276 set_curr_insn_source_location (location_t location)
278 /* IV opts calls into RTL expansion to compute costs of operations. At this
279 time locators are not initialized. */
280 if (curr_rtl_loc == -1)
281 return;
282 if (location == last_location)
283 return;
284 curr_location = location;
287 /* Set current scope block. */
288 void
289 set_curr_insn_block (tree b)
291 /* IV opts calls into RTL expansion to compute costs of operations. At this
292 time locators are not initialized. */
293 if (curr_rtl_loc == -1)
294 return;
295 if (b)
296 curr_block = b;
299 /* Return current insn locator. */
301 curr_insn_locator (void)
303 if (curr_rtl_loc == -1)
304 return 0;
305 if (last_block != curr_block)
307 curr_rtl_loc++;
308 VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
309 VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
310 last_block = curr_block;
312 if (last_location != curr_location)
314 curr_rtl_loc++;
315 VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
316 VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
317 last_location = curr_location;
319 return curr_rtl_loc;
/* Pass execute hook: enter cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
329 static unsigned int
330 outof_cfg_layout_mode (void)
332 basic_block bb;
334 FOR_EACH_BB (bb)
335 if (bb->next_bb != EXIT_BLOCK_PTR)
336 bb->aux = bb->next_bb;
338 cfg_layout_finalize ();
340 return 0;
343 struct tree_opt_pass pass_into_cfg_layout_mode =
345 "into_cfglayout", /* name */
346 NULL, /* gate */
347 into_cfg_layout_mode, /* execute */
348 NULL, /* sub */
349 NULL, /* next */
350 0, /* static_pass_number */
351 0, /* tv_id */
352 0, /* properties_required */
353 0, /* properties_provided */
354 0, /* properties_destroyed */
355 0, /* todo_flags_start */
356 TODO_dump_func, /* todo_flags_finish */
357 0 /* letter */
360 struct tree_opt_pass pass_outof_cfg_layout_mode =
362 "outof_cfglayout", /* name */
363 NULL, /* gate */
364 outof_cfg_layout_mode, /* execute */
365 NULL, /* sub */
366 NULL, /* next */
367 0, /* static_pass_number */
368 0, /* tv_id */
369 0, /* properties_required */
370 0, /* properties_provided */
371 0, /* properties_destroyed */
372 0, /* todo_flags_start */
373 TODO_dump_func, /* todo_flags_finish */
374 0 /* letter */
377 /* Return sope resulting from combination of S1 and S2. */
378 static tree
379 choose_inner_scope (tree s1, tree s2)
381 if (!s1)
382 return s2;
383 if (!s2)
384 return s1;
385 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
386 return s1;
387 return s2;
390 /* Emit lexical block notes needed to change scope from S1 to S2. */
392 static void
393 change_scope (rtx orig_insn, tree s1, tree s2)
395 rtx insn = orig_insn;
396 tree com = NULL_TREE;
397 tree ts1 = s1, ts2 = s2;
398 tree s;
400 while (ts1 != ts2)
402 gcc_assert (ts1 && ts2);
403 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
404 ts1 = BLOCK_SUPERCONTEXT (ts1);
405 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
406 ts2 = BLOCK_SUPERCONTEXT (ts2);
407 else
409 ts1 = BLOCK_SUPERCONTEXT (ts1);
410 ts2 = BLOCK_SUPERCONTEXT (ts2);
413 com = ts1;
415 /* Close scopes. */
416 s = s1;
417 while (s != com)
419 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
420 NOTE_BLOCK (note) = s;
421 s = BLOCK_SUPERCONTEXT (s);
424 /* Open scopes. */
425 s = s2;
426 while (s != com)
428 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
429 NOTE_BLOCK (insn) = s;
430 s = BLOCK_SUPERCONTEXT (s);
434 /* Return lexical scope block insn belong to. */
435 static tree
436 insn_scope (const_rtx insn)
438 int max = VEC_length (int, block_locators_locs);
439 int min = 0;
440 int loc = INSN_LOCATOR (insn);
442 /* When block_locators_locs was initialized, the pro- and epilogue
443 insns didn't exist yet and can therefore not be found this way.
444 But we know that they belong to the outer most block of the
445 current function.
446 Without this test, the prologue would be put inside the block of
447 the first valid instruction in the function and when that first
448 insn is part of an inlined function then the low_pc of that
449 inlined function is messed up. Likewise for the epilogue and
450 the last valid instruction. */
451 if (loc == prologue_locator || loc == epilogue_locator)
452 return DECL_INITIAL (cfun->decl);
454 if (!max || !loc)
455 return NULL;
456 while (1)
458 int pos = (min + max) / 2;
459 int tmp = VEC_index (int, block_locators_locs, pos);
461 if (tmp <= loc && min != pos)
462 min = pos;
463 else if (tmp > loc && max != pos)
464 max = pos;
465 else
467 min = pos;
468 break;
471 return VEC_index (tree, block_locators_blocks, min);
474 /* Return line number of the statement specified by the locator. */
475 static location_t
476 locator_location (int loc)
478 int max = VEC_length (int, locations_locators_locs);
479 int min = 0;
481 while (1)
483 int pos = (min + max) / 2;
484 int tmp = VEC_index (int, locations_locators_locs, pos);
486 if (tmp <= loc && min != pos)
487 min = pos;
488 else if (tmp > loc && max != pos)
489 max = pos;
490 else
492 min = pos;
493 break;
496 return *VEC_index (location_t, locations_locators_vals, min);
499 /* Return source line of the statement that produced this insn. */
501 locator_line (int loc)
503 expanded_location xloc;
504 if (!loc)
505 return 0;
506 else
507 xloc = expand_location (locator_location (loc));
508 return xloc.line;
511 /* Return line number of the statement that produced this insn. */
513 insn_line (const_rtx insn)
515 return locator_line (INSN_LOCATOR (insn));
518 /* Return source file of the statement specified by LOC. */
519 const char *
520 locator_file (int loc)
522 expanded_location xloc;
523 if (!loc)
524 return 0;
525 else
526 xloc = expand_location (locator_location (loc));
527 return xloc.file;
530 /* Return source file of the statement that produced this insn. */
531 const char *
532 insn_file (const_rtx insn)
534 return locator_file (INSN_LOCATOR (insn));
537 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
538 on the scope tree and the newly reordered instructions. */
540 void
541 reemit_insn_block_notes (void)
543 tree cur_block = DECL_INITIAL (cfun->decl);
544 rtx insn, note;
546 insn = get_insns ();
547 if (!active_insn_p (insn))
548 insn = next_active_insn (insn);
549 for (; insn; insn = next_active_insn (insn))
551 tree this_block;
553 /* Avoid putting scope notes between jump table and its label. */
554 if (JUMP_P (insn)
555 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
556 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
557 continue;
559 this_block = insn_scope (insn);
560 /* For sequences compute scope resulting from merging all scopes
561 of instructions nested inside. */
562 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
564 int i;
565 rtx body = PATTERN (insn);
567 this_block = NULL;
568 for (i = 0; i < XVECLEN (body, 0); i++)
569 this_block = choose_inner_scope (this_block,
570 insn_scope (XVECEXP (body, 0, i)));
572 if (! this_block)
573 continue;
575 if (this_block != cur_block)
577 change_scope (insn, cur_block, this_block);
578 cur_block = this_block;
582 /* change_scope emits before the insn, not after. */
583 note = emit_note (NOTE_INSN_DELETED);
584 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
585 delete_insn (note);
587 reorder_blocks ();
591 /* Link the basic blocks in the correct order, compacting the basic
592 block queue while at it. This also clears the visited flag on
593 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
594 also clears the basic block header and footer fields.
596 This function is usually called after a pass (e.g. tracer) finishes
597 some transformations while in cfglayout mode. The required sequence
598 of the basic blocks is in a linked list along the bb->aux field.
599 This functions re-links the basic block prev_bb and next_bb pointers
600 accordingly, and it compacts and renumbers the blocks. */
602 void
603 relink_block_chain (bool stay_in_cfglayout_mode)
605 basic_block bb, prev_bb;
606 int index;
608 /* Maybe dump the re-ordered sequence. */
609 if (dump_file)
611 fprintf (dump_file, "Reordered sequence:\n");
612 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
614 bb = (basic_block) bb->aux, index++)
616 fprintf (dump_file, " %i ", index);
617 if (get_bb_original (bb))
618 fprintf (dump_file, "duplicate of %i ",
619 get_bb_original (bb)->index);
620 else if (forwarder_block_p (bb)
621 && !LABEL_P (BB_HEAD (bb)))
622 fprintf (dump_file, "compensation ");
623 else
624 fprintf (dump_file, "bb %i ", bb->index);
625 fprintf (dump_file, " [%i]\n", bb->frequency);
629 /* Now reorder the blocks. */
630 prev_bb = ENTRY_BLOCK_PTR;
631 bb = ENTRY_BLOCK_PTR->next_bb;
632 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
634 bb->prev_bb = prev_bb;
635 prev_bb->next_bb = bb;
637 prev_bb->next_bb = EXIT_BLOCK_PTR;
638 EXIT_BLOCK_PTR->prev_bb = prev_bb;
640 /* Then, clean up the aux and visited fields. */
641 FOR_ALL_BB (bb)
643 bb->aux = NULL;
644 bb->il.rtl->visited = 0;
645 if (!stay_in_cfglayout_mode)
646 bb->il.rtl->header = bb->il.rtl->footer = NULL;
649 /* Maybe reset the original copy tables, they are not valid anymore
650 when we renumber the basic blocks in compact_blocks. If we are
651 are going out of cfglayout mode, don't re-allocate the tables. */
652 free_original_copy_tables ();
653 if (stay_in_cfglayout_mode)
654 initialize_original_copy_tables ();
656 /* Finally, put basic_block_info in the new order. */
657 compact_blocks ();
661 /* Given a reorder chain, rearrange the code to match. */
663 static void
664 fixup_reorder_chain (void)
666 basic_block bb;
667 rtx insn = NULL;
669 if (cfg_layout_function_header)
671 set_first_insn (cfg_layout_function_header);
672 insn = cfg_layout_function_header;
673 while (NEXT_INSN (insn))
674 insn = NEXT_INSN (insn);
677 /* First do the bulk reordering -- rechain the blocks without regard to
678 the needed changes to jumps and labels. */
680 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
682 if (bb->il.rtl->header)
684 if (insn)
685 NEXT_INSN (insn) = bb->il.rtl->header;
686 else
687 set_first_insn (bb->il.rtl->header);
688 PREV_INSN (bb->il.rtl->header) = insn;
689 insn = bb->il.rtl->header;
690 while (NEXT_INSN (insn))
691 insn = NEXT_INSN (insn);
693 if (insn)
694 NEXT_INSN (insn) = BB_HEAD (bb);
695 else
696 set_first_insn (BB_HEAD (bb));
697 PREV_INSN (BB_HEAD (bb)) = insn;
698 insn = BB_END (bb);
699 if (bb->il.rtl->footer)
701 NEXT_INSN (insn) = bb->il.rtl->footer;
702 PREV_INSN (bb->il.rtl->footer) = insn;
703 while (NEXT_INSN (insn))
704 insn = NEXT_INSN (insn);
708 NEXT_INSN (insn) = cfg_layout_function_footer;
709 if (cfg_layout_function_footer)
710 PREV_INSN (cfg_layout_function_footer) = insn;
712 while (NEXT_INSN (insn))
713 insn = NEXT_INSN (insn);
715 set_last_insn (insn);
716 #ifdef ENABLE_CHECKING
717 verify_insn_chain ();
718 #endif
720 /* Now add jumps and labels as needed to match the blocks new
721 outgoing edges. */
723 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
725 edge e_fall, e_taken, e;
726 rtx bb_end_insn;
727 basic_block nb;
728 edge_iterator ei;
730 if (EDGE_COUNT (bb->succs) == 0)
731 continue;
733 /* Find the old fallthru edge, and another non-EH edge for
734 a taken jump. */
735 e_taken = e_fall = NULL;
737 FOR_EACH_EDGE (e, ei, bb->succs)
738 if (e->flags & EDGE_FALLTHRU)
739 e_fall = e;
740 else if (! (e->flags & EDGE_EH))
741 e_taken = e;
743 bb_end_insn = BB_END (bb);
744 if (JUMP_P (bb_end_insn))
746 if (any_condjump_p (bb_end_insn))
748 /* If the old fallthru is still next, nothing to do. */
749 if (bb->aux == e_fall->dest
750 || e_fall->dest == EXIT_BLOCK_PTR)
751 continue;
753 /* The degenerated case of conditional jump jumping to the next
754 instruction can happen for jumps with side effects. We need
755 to construct a forwarder block and this will be done just
756 fine by force_nonfallthru below. */
757 if (!e_taken)
760 /* There is another special case: if *neither* block is next,
761 such as happens at the very end of a function, then we'll
762 need to add a new unconditional jump. Choose the taken
763 edge based on known or assumed probability. */
764 else if (bb->aux != e_taken->dest)
766 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
768 if (note
769 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
770 && invert_jump (bb_end_insn,
771 (e_fall->dest == EXIT_BLOCK_PTR
772 ? NULL_RTX
773 : label_for_bb (e_fall->dest)), 0))
775 e_fall->flags &= ~EDGE_FALLTHRU;
776 #ifdef ENABLE_CHECKING
777 gcc_assert (could_fall_through
778 (e_taken->src, e_taken->dest));
779 #endif
780 e_taken->flags |= EDGE_FALLTHRU;
781 update_br_prob_note (bb);
782 e = e_fall, e_fall = e_taken, e_taken = e;
786 /* If the "jumping" edge is a crossing edge, and the fall
787 through edge is non-crossing, leave things as they are. */
788 else if ((e_taken->flags & EDGE_CROSSING)
789 && !(e_fall->flags & EDGE_CROSSING))
790 continue;
792 /* Otherwise we can try to invert the jump. This will
793 basically never fail, however, keep up the pretense. */
794 else if (invert_jump (bb_end_insn,
795 (e_fall->dest == EXIT_BLOCK_PTR
796 ? NULL_RTX
797 : label_for_bb (e_fall->dest)), 0))
799 e_fall->flags &= ~EDGE_FALLTHRU;
800 #ifdef ENABLE_CHECKING
801 gcc_assert (could_fall_through
802 (e_taken->src, e_taken->dest));
803 #endif
804 e_taken->flags |= EDGE_FALLTHRU;
805 update_br_prob_note (bb);
806 continue;
809 else
811 /* Otherwise we have some return, switch or computed
812 jump. In the 99% case, there should not have been a
813 fallthru edge. */
814 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
815 continue;
818 else
820 /* No fallthru implies a noreturn function with EH edges, or
821 something similarly bizarre. In any case, we don't need to
822 do anything. */
823 if (! e_fall)
824 continue;
826 /* If the fallthru block is still next, nothing to do. */
827 if (bb->aux == e_fall->dest)
828 continue;
830 /* A fallthru to exit block. */
831 if (e_fall->dest == EXIT_BLOCK_PTR)
832 continue;
835 /* We got here if we need to add a new jump insn. */
836 nb = force_nonfallthru (e_fall);
837 if (nb)
839 nb->il.rtl->visited = 1;
840 nb->aux = bb->aux;
841 bb->aux = nb;
842 /* Don't process this new block. */
843 bb = nb;
845 /* Make sure new bb is tagged for correct section (same as
846 fall-thru source, since you cannot fall-throu across
847 section boundaries). */
848 BB_COPY_PARTITION (e_fall->src, single_pred (bb));
849 if (flag_reorder_blocks_and_partition
850 && targetm.have_named_sections
851 && JUMP_P (BB_END (bb))
852 && !any_condjump_p (BB_END (bb))
853 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
854 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
855 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
859 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
861 /* Annoying special case - jump around dead jumptables left in the code. */
862 FOR_EACH_BB (bb)
864 edge e;
865 edge_iterator ei;
867 FOR_EACH_EDGE (e, ei, bb->succs)
868 if (e->flags & EDGE_FALLTHRU)
869 break;
871 if (e && !can_fallthru (e->src, e->dest))
872 force_nonfallthru (e);
876 /* Perform sanity checks on the insn chain.
877 1. Check that next/prev pointers are consistent in both the forward and
878 reverse direction.
879 2. Count insns in chain, going both directions, and check if equal.
880 3. Check that get_last_insn () returns the actual end of chain. */
882 void
883 verify_insn_chain (void)
885 rtx x, prevx, nextx;
886 int insn_cnt1, insn_cnt2;
888 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
889 x != 0;
890 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
891 gcc_assert (PREV_INSN (x) == prevx);
893 gcc_assert (prevx == get_last_insn ());
895 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
896 x != 0;
897 nextx = x, insn_cnt2++, x = PREV_INSN (x))
898 gcc_assert (NEXT_INSN (x) == nextx);
900 gcc_assert (insn_cnt1 == insn_cnt2);
903 /* If we have assembler epilogues, the block falling through to exit must
904 be the last one in the reordered chain when we reach final. Ensure
905 that this condition is met. */
906 static void
907 fixup_fallthru_exit_predecessor (void)
909 edge e;
910 edge_iterator ei;
911 basic_block bb = NULL;
913 /* This transformation is not valid before reload, because we might
914 separate a call from the instruction that copies the return
915 value. */
916 gcc_assert (reload_completed);
918 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
919 if (e->flags & EDGE_FALLTHRU)
920 bb = e->src;
922 if (bb && bb->aux)
924 basic_block c = ENTRY_BLOCK_PTR->next_bb;
926 /* If the very first block is the one with the fall-through exit
927 edge, we have to split that block. */
928 if (c == bb)
930 bb = split_block (bb, NULL)->dest;
931 bb->aux = c->aux;
932 c->aux = bb;
933 bb->il.rtl->footer = c->il.rtl->footer;
934 c->il.rtl->footer = NULL;
937 while (c->aux != bb)
938 c = (basic_block) c->aux;
940 c->aux = bb->aux;
941 while (c->aux)
942 c = (basic_block) c->aux;
944 c->aux = bb;
945 bb->aux = NULL;
949 /* In case there are more than one fallthru predecessors of exit, force that
950 there is only one. */
952 static void
953 force_one_exit_fallthru (void)
955 edge e, predecessor = NULL;
956 bool more = false;
957 edge_iterator ei;
958 basic_block forwarder, bb;
960 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
961 if (e->flags & EDGE_FALLTHRU)
963 if (predecessor == NULL)
964 predecessor = e;
965 else
967 more = true;
968 break;
972 if (!more)
973 return;
975 /* Exit has several fallthru predecessors. Create a forwarder block for
976 them. */
977 forwarder = split_edge (predecessor);
978 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
980 if (e->src == forwarder
981 || !(e->flags & EDGE_FALLTHRU))
982 ei_next (&ei);
983 else
984 redirect_edge_and_branch_force (e, forwarder);
987 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
988 exit block. */
989 FOR_EACH_BB (bb)
991 if (bb->aux == NULL && bb != forwarder)
993 bb->aux = forwarder;
994 break;
999 /* Return true in case it is possible to duplicate the basic block BB. */
1001 /* We do not want to declare the function in a header file, since it should
1002 only be used through the cfghooks interface, and we do not want to move
1003 it to cfgrtl.c since it would require also moving quite a lot of related
1004 code. */
1005 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1007 bool
1008 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1010 /* Do not attempt to duplicate tablejumps, as we need to unshare
1011 the dispatch table. This is difficult to do, as the instructions
1012 computing jump destination may be hoisted outside the basic block. */
1013 if (tablejump_p (BB_END (bb), NULL, NULL))
1014 return false;
1016 /* Do not duplicate blocks containing insns that can't be copied. */
1017 if (targetm.cannot_copy_insn_p)
1019 rtx insn = BB_HEAD (bb);
1020 while (1)
1022 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1023 return false;
1024 if (insn == BB_END (bb))
1025 break;
1026 insn = NEXT_INSN (insn);
1030 return true;
1034 duplicate_insn_chain (rtx from, rtx to)
1036 rtx insn, last;
1038 /* Avoid updating of boundaries of previous basic block. The
1039 note will get removed from insn stream in fixup. */
1040 last = emit_note (NOTE_INSN_DELETED);
1042 /* Create copy at the end of INSN chain. The chain will
1043 be reordered later. */
1044 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1046 switch (GET_CODE (insn))
1048 case INSN:
1049 case CALL_INSN:
1050 case JUMP_INSN:
1051 /* Avoid copying of dispatch tables. We never duplicate
1052 tablejumps, so this can hit only in case the table got
1053 moved far from original jump. */
1054 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1055 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1056 break;
1057 emit_copy_of_insn_after (insn, get_last_insn ());
1058 break;
1060 case CODE_LABEL:
1061 break;
1063 case BARRIER:
1064 emit_barrier ();
1065 break;
1067 case NOTE:
1068 switch (NOTE_KIND (insn))
1070 /* In case prologue is empty and function contain label
1071 in first BB, we may want to copy the block. */
1072 case NOTE_INSN_PROLOGUE_END:
1074 case NOTE_INSN_DELETED:
1075 case NOTE_INSN_DELETED_LABEL:
1076 /* No problem to strip these. */
1077 case NOTE_INSN_EPILOGUE_BEG:
1078 /* Debug code expect these notes to exist just once.
1079 Keep them in the master copy.
1080 ??? It probably makes more sense to duplicate them for each
1081 epilogue copy. */
1082 case NOTE_INSN_FUNCTION_BEG:
1083 /* There is always just single entry to function. */
1084 case NOTE_INSN_BASIC_BLOCK:
1085 break;
1087 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1088 emit_note_copy (insn);
1089 break;
1091 default:
1092 /* All other notes should have already been eliminated.
1094 gcc_unreachable ();
1096 break;
1097 default:
1098 gcc_unreachable ();
1101 insn = NEXT_INSN (last);
1102 delete_insn (last);
1103 return insn;
1105 /* Create a duplicate of the basic block BB. */
1107 /* We do not want to declare the function in a header file, since it should
1108 only be used through the cfghooks interface, and we do not want to move
1109 it to cfgrtl.c since it would require also moving quite a lot of related
1110 code. */
1111 extern basic_block cfg_layout_duplicate_bb (basic_block);
1113 basic_block
1114 cfg_layout_duplicate_bb (basic_block bb)
1116 rtx insn;
1117 basic_block new_bb;
1119 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1120 new_bb = create_basic_block (insn,
1121 insn ? get_last_insn () : NULL,
1122 EXIT_BLOCK_PTR->prev_bb);
1124 BB_COPY_PARTITION (new_bb, bb);
1125 if (bb->il.rtl->header)
1127 insn = bb->il.rtl->header;
1128 while (NEXT_INSN (insn))
1129 insn = NEXT_INSN (insn);
1130 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1131 if (insn)
1132 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1135 if (bb->il.rtl->footer)
1137 insn = bb->il.rtl->footer;
1138 while (NEXT_INSN (insn))
1139 insn = NEXT_INSN (insn);
1140 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1141 if (insn)
1142 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1145 return new_bb;
1149 /* Main entry point to this module - initialize the datastructures for
1150 CFG layout changes. It keeps LOOPS up-to-date if not null.
1152 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1154 void
1155 cfg_layout_initialize (unsigned int flags)
1157 rtx x;
1158 basic_block bb;
1160 initialize_original_copy_tables ();
1162 cfg_layout_rtl_register_cfg_hooks ();
1164 record_effective_endpoints ();
1166 /* Make sure that the targets of non local gotos are marked. */
1167 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1169 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1170 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1173 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1176 /* Splits superblocks. */
1177 void
1178 break_superblocks (void)
1180 sbitmap superblocks;
1181 bool need = false;
1182 basic_block bb;
1184 superblocks = sbitmap_alloc (last_basic_block);
1185 sbitmap_zero (superblocks);
1187 FOR_EACH_BB (bb)
1188 if (bb->flags & BB_SUPERBLOCK)
1190 bb->flags &= ~BB_SUPERBLOCK;
1191 SET_BIT (superblocks, bb->index);
1192 need = true;
1195 if (need)
1197 rebuild_jump_labels (get_insns ());
1198 find_many_sub_basic_blocks (superblocks);
1201 free (superblocks);
1204 /* Finalize the changes: reorder insn list according to the sequence specified
1205 by aux pointers, enter compensation code, rebuild scope forest. */
1207 void
1208 cfg_layout_finalize (void)
1210 #ifdef ENABLE_CHECKING
1211 verify_flow_info ();
1212 #endif
1213 force_one_exit_fallthru ();
1214 rtl_register_cfg_hooks ();
1215 if (reload_completed
1216 #ifdef HAVE_epilogue
1217 && !HAVE_epilogue
1218 #endif
1220 fixup_fallthru_exit_predecessor ();
1221 fixup_reorder_chain ();
1223 rebuild_jump_labels (get_insns ());
1224 delete_dead_jumptables ();
1226 #ifdef ENABLE_CHECKING
1227 verify_insn_chain ();
1228 verify_flow_info ();
1229 #endif
1232 /* Checks whether all N blocks in BBS array can be copied. */
1233 bool
1234 can_copy_bbs_p (basic_block *bbs, unsigned n)
1236 unsigned i;
1237 edge e;
1238 int ret = true;
1240 for (i = 0; i < n; i++)
1241 bbs[i]->flags |= BB_DUPLICATED;
1243 for (i = 0; i < n; i++)
1245 /* In case we should redirect abnormal edge during duplication, fail. */
1246 edge_iterator ei;
1247 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1248 if ((e->flags & EDGE_ABNORMAL)
1249 && (e->dest->flags & BB_DUPLICATED))
1251 ret = false;
1252 goto end;
1255 if (!can_duplicate_block_p (bbs[i]))
1257 ret = false;
1258 break;
1262 end:
1263 for (i = 0; i < n; i++)
1264 bbs[i]->flags &= ~BB_DUPLICATED;
1266 return ret;
1269 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1270 are placed into array NEW_BBS in the same order. Edges from basic blocks
1271 in BBS are also duplicated and copies of those of them
1272 that lead into BBS are redirected to appropriate newly created block. The
1273 function assigns bbs into loops (copy of basic block bb is assigned to
1274 bb->loop_father->copy loop, so this must be set up correctly in advance)
1275 and updates dominators locally (LOOPS structure that contains the information
1276 about dominators is passed to enable this).
1278 BASE is the superloop to that basic block belongs; if its header or latch
1279 is copied, we do not set the new blocks as header or latch.
1281 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1282 also in the same order.
1284 Newly created basic blocks are put after the basic block AFTER in the
1285 instruction stream, and the order of the blocks in BBS array is preserved. */
1287 void
1288 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1289 edge *edges, unsigned num_edges, edge *new_edges,
1290 struct loop *base, basic_block after)
1292 unsigned i, j;
1293 basic_block bb, new_bb, dom_bb;
1294 edge e;
1296 /* Duplicate bbs, update dominators, assign bbs to loops. */
1297 for (i = 0; i < n; i++)
1299 /* Duplicate. */
1300 bb = bbs[i];
1301 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1302 after = new_bb;
1303 bb->flags |= BB_DUPLICATED;
1304 /* Possibly set loop header. */
1305 if (bb->loop_father->header == bb && bb->loop_father != base)
1306 new_bb->loop_father->header = new_bb;
1307 /* Or latch. */
1308 if (bb->loop_father->latch == bb && bb->loop_father != base)
1309 new_bb->loop_father->latch = new_bb;
1312 /* Set dominators. */
1313 for (i = 0; i < n; i++)
1315 bb = bbs[i];
1316 new_bb = new_bbs[i];
1318 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1319 if (dom_bb->flags & BB_DUPLICATED)
1321 dom_bb = get_bb_copy (dom_bb);
1322 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1326 /* Redirect edges. */
1327 for (j = 0; j < num_edges; j++)
1328 new_edges[j] = NULL;
1329 for (i = 0; i < n; i++)
1331 edge_iterator ei;
1332 new_bb = new_bbs[i];
1333 bb = bbs[i];
1335 FOR_EACH_EDGE (e, ei, new_bb->succs)
1337 for (j = 0; j < num_edges; j++)
1338 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1339 new_edges[j] = e;
1341 if (!(e->dest->flags & BB_DUPLICATED))
1342 continue;
1343 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1347 /* Clear information about duplicates. */
1348 for (i = 0; i < n; i++)
1349 bbs[i]->flags &= ~BB_DUPLICATED;
1352 #include "gt-cfglayout.h"