Merge from trunk: 215733-215743
[official-gcc.git] / gcc-4_7 / gcc / cfglayout.c
blob84d207f95f3beb29b296f13a072a88454c4268b9
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "common/common-target.h"
37 #include "ggc.h"
38 #include "alloc-pool.h"
39 #include "flags.h"
40 #include "tree-pass.h"
41 #include "df.h"
42 #include "vecprim.h"
43 #include "emit-rtl.h"
45 /* Holds the interesting trailing notes for the function. */
46 rtx cfg_layout_function_footer;
47 rtx cfg_layout_function_header;
49 static rtx skip_insns_after_block (basic_block);
50 static void record_effective_endpoints (void);
51 static rtx label_for_bb (basic_block);
52 static void fixup_reorder_chain (void);
54 static void change_scope (rtx, tree, tree);
56 void verify_insn_chain (void);
57 static void fixup_fallthru_exit_predecessor (void);
59 rtx
60 unlink_insn_chain (rtx first, rtx last)
62 rtx prevfirst = PREV_INSN (first);
63 rtx nextlast = NEXT_INSN (last);
65 PREV_INSN (first) = NULL;
66 NEXT_INSN (last) = NULL;
67 if (prevfirst)
68 NEXT_INSN (prevfirst) = nextlast;
69 if (nextlast)
70 PREV_INSN (nextlast) = prevfirst;
71 else
72 set_last_insn (prevfirst);
73 if (!prevfirst)
74 set_first_insn (nextlast);
75 return first;
78 /* Skip over inter-block insns occurring after BB which are typically
79 associated with BB (e.g., barriers). If there are any such insns,
80 we return the last one. Otherwise, we return the end of BB. */
82 static rtx
83 skip_insns_after_block (basic_block bb)
85 rtx insn, last_insn, next_head, prev;
87 next_head = NULL_RTX;
88 if (bb->next_bb != EXIT_BLOCK_PTR)
89 next_head = BB_HEAD (bb->next_bb);
91 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
93 if (insn == next_head)
94 break;
96 switch (GET_CODE (insn))
98 case BARRIER:
99 last_insn = insn;
100 continue;
102 case NOTE:
103 switch (NOTE_KIND (insn))
105 case NOTE_INSN_BLOCK_END:
106 gcc_unreachable ();
107 continue;
108 default:
109 continue;
110 break;
112 break;
114 case CODE_LABEL:
115 if (NEXT_INSN (insn)
116 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
118 insn = NEXT_INSN (insn);
119 last_insn = insn;
120 continue;
122 break;
124 default:
125 break;
128 break;
131 /* It is possible to hit contradictory sequence. For instance:
133 jump_insn
134 NOTE_INSN_BLOCK_BEG
135 barrier
137 Where barrier belongs to jump_insn, but the note does not. This can be
138 created by removing the basic block originally following
139 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
141 for (insn = last_insn; insn != BB_END (bb); insn = prev)
143 prev = PREV_INSN (insn);
144 if (NOTE_P (insn))
145 switch (NOTE_KIND (insn))
147 case NOTE_INSN_BLOCK_END:
148 gcc_unreachable ();
149 break;
150 case NOTE_INSN_DELETED:
151 case NOTE_INSN_DELETED_LABEL:
152 case NOTE_INSN_DELETED_DEBUG_LABEL:
153 continue;
154 default:
155 reorder_insns (insn, insn, last_insn);
159 return last_insn;
162 /* Locate or create a label for a given basic block. */
164 static rtx
165 label_for_bb (basic_block bb)
167 rtx label = BB_HEAD (bb);
169 if (!LABEL_P (label))
171 if (dump_file)
172 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
174 label = block_label (bb);
177 return label;
180 /* Locate the effective beginning and end of the insn chain for each
181 block, as defined by skip_insns_after_block above. */
183 static void
184 record_effective_endpoints (void)
186 rtx next_insn;
187 basic_block bb;
188 rtx insn;
190 for (insn = get_insns ();
191 insn
192 && NOTE_P (insn)
193 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
194 insn = NEXT_INSN (insn))
195 continue;
196 /* No basic blocks at all? */
197 gcc_assert (insn);
199 if (PREV_INSN (insn))
200 cfg_layout_function_header =
201 unlink_insn_chain (get_insns (), PREV_INSN (insn));
202 else
203 cfg_layout_function_header = NULL_RTX;
205 next_insn = get_insns ();
206 FOR_EACH_BB (bb)
208 rtx end;
210 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
211 bb->il.rtl->header = unlink_insn_chain (next_insn,
212 PREV_INSN (BB_HEAD (bb)));
213 end = skip_insns_after_block (bb);
214 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
215 bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
216 next_insn = NEXT_INSN (BB_END (bb));
219 cfg_layout_function_footer = next_insn;
220 if (cfg_layout_function_footer)
221 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
224 location_t prologue_location, epilogue_location;
225 static location_t curr_location;
227 /* Allocate insn location datastructure. */
228 void
229 insn_locations_init (void)
231 prologue_location = epilogue_location = 0;
232 curr_location = UNKNOWN_LOCATION;
235 /* At the end of emit stage, clear current location. */
236 void
237 insn_locations_finalize (void)
239 epilogue_location = curr_location;
240 curr_location = UNKNOWN_LOCATION;
243 /* Set current location. */
244 void
245 set_curr_insn_location (location_t location)
247 curr_location = location;
250 /* Get current location. */
251 location_t
252 curr_insn_location (void)
254 return curr_location;
/* Pass body: switch the function into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
264 static unsigned int
265 outof_cfg_layout_mode (void)
267 basic_block bb;
269 FOR_EACH_BB (bb)
270 if (bb->next_bb != EXIT_BLOCK_PTR)
271 bb->aux = bb->next_bb;
273 cfg_layout_finalize ();
275 return 0;
278 struct rtl_opt_pass pass_into_cfg_layout_mode =
281 RTL_PASS,
282 "into_cfglayout", /* name */
283 NULL, /* gate */
284 into_cfg_layout_mode, /* execute */
285 NULL, /* sub */
286 NULL, /* next */
287 0, /* static_pass_number */
288 TV_CFG, /* tv_id */
289 0, /* properties_required */
290 PROP_cfglayout, /* properties_provided */
291 0, /* properties_destroyed */
292 0, /* todo_flags_start */
293 0 /* todo_flags_finish */
297 struct rtl_opt_pass pass_outof_cfg_layout_mode =
300 RTL_PASS,
301 "outof_cfglayout", /* name */
302 NULL, /* gate */
303 outof_cfg_layout_mode, /* execute */
304 NULL, /* sub */
305 NULL, /* next */
306 0, /* static_pass_number */
307 TV_CFG, /* tv_id */
308 0, /* properties_required */
309 0, /* properties_provided */
310 PROP_cfglayout, /* properties_destroyed */
311 0, /* todo_flags_start */
312 0 /* todo_flags_finish */
316 /* Return scope resulting from combination of S1 and S2. */
317 static tree
318 choose_inner_scope (tree s1, tree s2)
320 if (!s1)
321 return s2;
322 if (!s2)
323 return s1;
324 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
325 return s1;
326 return s2;
329 /* Emit lexical block notes needed to change scope from S1 to S2. */
331 static void
332 change_scope (rtx orig_insn, tree s1, tree s2)
334 rtx insn = orig_insn;
335 tree com = NULL_TREE;
336 tree ts1 = s1, ts2 = s2;
337 tree s;
339 while (ts1 != ts2)
341 gcc_assert (ts1 && ts2);
342 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
343 ts1 = BLOCK_SUPERCONTEXT (ts1);
344 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
345 ts2 = BLOCK_SUPERCONTEXT (ts2);
346 else
348 ts1 = BLOCK_SUPERCONTEXT (ts1);
349 ts2 = BLOCK_SUPERCONTEXT (ts2);
352 com = ts1;
354 /* Close scopes. */
355 s = s1;
356 while (s != com)
358 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
359 NOTE_BLOCK (note) = s;
360 s = BLOCK_SUPERCONTEXT (s);
363 /* Open scopes. */
364 s = s2;
365 while (s != com)
367 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
368 NOTE_BLOCK (insn) = s;
369 s = BLOCK_SUPERCONTEXT (s);
373 /* Return lexical scope block insn belongs to. */
374 tree
375 insn_scope (const_rtx insn)
377 return LOCATION_BLOCK (INSN_LOCATION (insn));
380 /* Return line number of the statement that produced this insn. */
382 insn_line (const_rtx insn)
384 return LOCATION_LINE (INSN_LOCATION (insn));
387 /* Return source file of the statement that produced this insn. */
388 const char *
389 insn_file (const_rtx insn)
391 return LOCATION_FILE (INSN_LOCATION (insn));
394 /* Return discriminator of the statement that produced this insn. */
396 insn_discriminator (const_rtx insn)
398 location_t loc = INSN_LOCATION (insn);
399 if (!loc)
400 return 0;
401 return get_discriminator_from_locus (loc);
404 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
405 on the scope tree and the newly reordered instructions. */
407 void
408 reemit_insn_block_notes (void)
410 tree cur_block = DECL_INITIAL (cfun->decl);
411 rtx insn, note;
413 insn = get_insns ();
414 if (!active_insn_p (insn))
415 insn = next_active_insn (insn);
416 for (; insn; insn = next_active_insn (insn))
418 tree this_block;
420 /* Avoid putting scope notes between jump table and its label. */
421 if (JUMP_TABLE_DATA_P (insn))
422 continue;
424 this_block = insn_scope (insn);
425 /* For sequences compute scope resulting from merging all scopes
426 of instructions nested inside. */
427 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
429 int i;
430 rtx body = PATTERN (insn);
432 this_block = NULL;
433 for (i = 0; i < XVECLEN (body, 0); i++)
434 this_block = choose_inner_scope (this_block,
435 insn_scope (XVECEXP (body, 0, i)));
437 if (! this_block)
439 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
440 continue;
441 else
442 this_block = DECL_INITIAL (cfun->decl);
445 if (this_block != cur_block)
447 change_scope (insn, cur_block, this_block);
448 cur_block = this_block;
452 /* change_scope emits before the insn, not after. */
453 note = emit_note (NOTE_INSN_DELETED);
454 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
455 delete_insn (note);
457 reorder_blocks ();
461 /* Link the basic blocks in the correct order, compacting the basic
462 block queue while at it. This also clears the visited flag on
463 all basic blocks. If STAY_IN_CFGLAYOUT_MODE is false, this function
464 also clears the basic block header and footer fields.
466 This function is usually called after a pass (e.g. tracer) finishes
467 some transformations while in cfglayout mode. The required sequence
468 of the basic blocks is in a linked list along the bb->aux field.
469 This functions re-links the basic block prev_bb and next_bb pointers
470 accordingly, and it compacts and renumbers the blocks. */
472 void
473 relink_block_chain (bool stay_in_cfglayout_mode)
475 basic_block bb, prev_bb;
476 int index;
478 /* Maybe dump the re-ordered sequence. */
479 if (dump_file)
481 fprintf (dump_file, "Reordered sequence:\n");
482 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
484 bb = (basic_block) bb->aux, index++)
486 fprintf (dump_file, " %i ", index);
487 if (get_bb_original (bb))
488 fprintf (dump_file, "duplicate of %i ",
489 get_bb_original (bb)->index);
490 else if (forwarder_block_p (bb)
491 && !LABEL_P (BB_HEAD (bb)))
492 fprintf (dump_file, "compensation ");
493 else
494 fprintf (dump_file, "bb %i ", bb->index);
495 fprintf (dump_file, " [%i]\n", bb->frequency);
499 /* Now reorder the blocks. */
500 prev_bb = ENTRY_BLOCK_PTR;
501 bb = ENTRY_BLOCK_PTR->next_bb;
502 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
504 bb->prev_bb = prev_bb;
505 prev_bb->next_bb = bb;
507 prev_bb->next_bb = EXIT_BLOCK_PTR;
508 EXIT_BLOCK_PTR->prev_bb = prev_bb;
510 /* Then, clean up the aux and visited fields. */
511 FOR_ALL_BB (bb)
513 bb->aux = NULL;
514 bb->il.rtl->visited = 0;
515 if (!stay_in_cfglayout_mode)
516 bb->il.rtl->header = bb->il.rtl->footer = NULL;
519 /* Maybe reset the original copy tables, they are not valid anymore
520 when we renumber the basic blocks in compact_blocks. If we are
521 are going out of cfglayout mode, don't re-allocate the tables. */
522 free_original_copy_tables ();
523 if (stay_in_cfglayout_mode)
524 initialize_original_copy_tables ();
526 /* Finally, put basic_block_info in the new order. */
527 compact_blocks ();
531 /* Given a reorder chain, rearrange the code to match. */
533 static void
534 fixup_reorder_chain (void)
536 basic_block bb;
537 rtx insn = NULL;
539 if (cfg_layout_function_header)
541 set_first_insn (cfg_layout_function_header);
542 insn = cfg_layout_function_header;
543 while (NEXT_INSN (insn))
544 insn = NEXT_INSN (insn);
547 /* First do the bulk reordering -- rechain the blocks without regard to
548 the needed changes to jumps and labels. */
550 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
552 if (bb->il.rtl->header)
554 if (insn)
555 NEXT_INSN (insn) = bb->il.rtl->header;
556 else
557 set_first_insn (bb->il.rtl->header);
558 PREV_INSN (bb->il.rtl->header) = insn;
559 insn = bb->il.rtl->header;
560 while (NEXT_INSN (insn))
561 insn = NEXT_INSN (insn);
563 if (insn)
564 NEXT_INSN (insn) = BB_HEAD (bb);
565 else
566 set_first_insn (BB_HEAD (bb));
567 PREV_INSN (BB_HEAD (bb)) = insn;
568 insn = BB_END (bb);
569 if (bb->il.rtl->footer)
571 NEXT_INSN (insn) = bb->il.rtl->footer;
572 PREV_INSN (bb->il.rtl->footer) = insn;
573 while (NEXT_INSN (insn))
574 insn = NEXT_INSN (insn);
578 NEXT_INSN (insn) = cfg_layout_function_footer;
579 if (cfg_layout_function_footer)
580 PREV_INSN (cfg_layout_function_footer) = insn;
582 while (NEXT_INSN (insn))
583 insn = NEXT_INSN (insn);
585 set_last_insn (insn);
586 #ifdef ENABLE_CHECKING
587 verify_insn_chain ();
588 #endif
590 /* Now add jumps and labels as needed to match the blocks new
591 outgoing edges. */
593 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
595 edge e_fall, e_taken, e;
596 rtx bb_end_insn;
597 rtx ret_label = NULL_RTX;
598 basic_block nb, src_bb;
599 edge_iterator ei;
601 if (EDGE_COUNT (bb->succs) == 0)
602 continue;
604 /* Find the old fallthru edge, and another non-EH edge for
605 a taken jump. */
606 e_taken = e_fall = NULL;
608 FOR_EACH_EDGE (e, ei, bb->succs)
609 if (e->flags & EDGE_FALLTHRU)
610 e_fall = e;
611 else if (! (e->flags & EDGE_EH))
612 e_taken = e;
614 bb_end_insn = BB_END (bb);
615 if (JUMP_P (bb_end_insn))
617 ret_label = JUMP_LABEL (bb_end_insn);
618 if (any_condjump_p (bb_end_insn))
620 /* This might happen if the conditional jump has side
621 effects and could therefore not be optimized away.
622 Make the basic block to end with a barrier in order
623 to prevent rtl_verify_flow_info from complaining. */
624 if (!e_fall)
626 gcc_assert (!onlyjump_p (bb_end_insn)
627 || returnjump_p (bb_end_insn));
628 bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
629 continue;
632 /* If the old fallthru is still next, nothing to do. */
633 if (bb->aux == e_fall->dest
634 || e_fall->dest == EXIT_BLOCK_PTR)
635 continue;
637 /* The degenerated case of conditional jump jumping to the next
638 instruction can happen for jumps with side effects. We need
639 to construct a forwarder block and this will be done just
640 fine by force_nonfallthru below. */
641 if (!e_taken)
644 /* There is another special case: if *neither* block is next,
645 such as happens at the very end of a function, then we'll
646 need to add a new unconditional jump. Choose the taken
647 edge based on known or assumed probability. */
648 else if (bb->aux != e_taken->dest)
650 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
652 if (note
653 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
654 && invert_jump (bb_end_insn,
655 (e_fall->dest == EXIT_BLOCK_PTR
656 ? NULL_RTX
657 : label_for_bb (e_fall->dest)), 0))
659 e_fall->flags &= ~EDGE_FALLTHRU;
660 gcc_checking_assert (could_fall_through
661 (e_taken->src, e_taken->dest));
662 e_taken->flags |= EDGE_FALLTHRU;
663 update_br_prob_note (bb);
664 e = e_fall, e_fall = e_taken, e_taken = e;
668 /* If the "jumping" edge is a crossing edge, and the fall
669 through edge is non-crossing, leave things as they are. */
670 else if ((e_taken->flags & EDGE_CROSSING)
671 && !(e_fall->flags & EDGE_CROSSING))
672 continue;
674 /* Otherwise we can try to invert the jump. This will
675 basically never fail, however, keep up the pretense. */
676 else if (invert_jump (bb_end_insn,
677 (e_fall->dest == EXIT_BLOCK_PTR
678 ? NULL_RTX
679 : label_for_bb (e_fall->dest)), 0))
681 e_fall->flags &= ~EDGE_FALLTHRU;
682 gcc_checking_assert (could_fall_through
683 (e_taken->src, e_taken->dest));
684 e_taken->flags |= EDGE_FALLTHRU;
685 update_br_prob_note (bb);
686 continue;
689 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
691 /* If the old fallthru is still next or if
692 asm goto doesn't have a fallthru (e.g. when followed by
693 __builtin_unreachable ()), nothing to do. */
694 if (! e_fall
695 || bb->aux == e_fall->dest
696 || e_fall->dest == EXIT_BLOCK_PTR)
697 continue;
699 /* Otherwise we'll have to use the fallthru fixup below. */
701 else
703 /* Otherwise we have some return, switch or computed
704 jump. In the 99% case, there should not have been a
705 fallthru edge. */
706 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
707 continue;
710 else
712 /* No fallthru implies a noreturn function with EH edges, or
713 something similarly bizarre. In any case, we don't need to
714 do anything. */
715 if (! e_fall)
716 continue;
718 /* If the fallthru block is still next, nothing to do. */
719 if (bb->aux == e_fall->dest)
720 continue;
722 /* A fallthru to exit block. */
723 if (e_fall->dest == EXIT_BLOCK_PTR)
724 continue;
727 /* We got here if we need to add a new jump insn.
728 Note force_nonfallthru can delete E_FALL and thus we have to
729 save E_FALL->src prior to the call to force_nonfallthru. */
730 src_bb = e_fall->src;
731 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
732 if (nb)
734 nb->il.rtl->visited = 1;
735 nb->aux = bb->aux;
736 bb->aux = nb;
737 /* Don't process this new block. */
738 bb = nb;
740 /* Make sure new bb is tagged for correct section (same as
741 fall-thru source, since you cannot fall-thru across
742 section boundaries). */
743 BB_COPY_PARTITION (src_bb, single_pred (bb));
744 if (flag_reorder_blocks_and_partition
745 && targetm_common.have_named_sections
746 && JUMP_P (BB_END (bb))
747 && !any_condjump_p (BB_END (bb))
748 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
749 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
753 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
755 /* Annoying special case - jump around dead jumptables left in the code. */
756 FOR_EACH_BB (bb)
758 edge e = find_fallthru_edge (bb->succs);
760 if (e && !can_fallthru (e->src, e->dest))
761 force_nonfallthru (e);
764 /* Ensure goto_locus from edges has some instructions with that locus
765 in RTL. */
766 if (!optimize)
767 FOR_EACH_BB (bb)
769 edge e;
770 edge_iterator ei;
772 FOR_EACH_EDGE (e, ei, bb->succs)
773 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
774 && !(e->flags & EDGE_ABNORMAL))
776 edge e2;
777 edge_iterator ei2;
778 basic_block dest, nb;
779 rtx end;
781 insn = BB_END (e->src);
782 end = PREV_INSN (BB_HEAD (e->src));
783 while (insn != end
784 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
785 insn = PREV_INSN (insn);
786 if (insn != end
787 && INSN_LOCATION (insn) == e->goto_locus)
788 continue;
789 if (simplejump_p (BB_END (e->src))
790 && !INSN_HAS_LOCATION (BB_END (e->src)))
792 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
793 continue;
795 dest = e->dest;
796 if (dest == EXIT_BLOCK_PTR)
798 /* Non-fallthru edges to the exit block cannot be split. */
799 if (!(e->flags & EDGE_FALLTHRU))
800 continue;
802 else
804 insn = BB_HEAD (dest);
805 end = NEXT_INSN (BB_END (dest));
806 while (insn != end && !NONDEBUG_INSN_P (insn))
807 insn = NEXT_INSN (insn);
808 if (insn != end && INSN_HAS_LOCATION (insn)
809 && INSN_LOCATION (insn) == e->goto_locus)
810 continue;
812 nb = split_edge (e);
813 if (!INSN_P (BB_END (nb)))
814 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
815 nb);
816 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
818 /* If there are other incoming edges to the destination block
819 with the same goto locus, redirect them to the new block as
820 well, this can prevent other such blocks from being created
821 in subsequent iterations of the loop. */
822 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
823 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
824 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
825 && e->goto_locus == e2->goto_locus)
826 redirect_edge_and_branch (e2, nb);
827 else
828 ei_next (&ei2);
833 /* Perform sanity checks on the insn chain.
834 1. Check that next/prev pointers are consistent in both the forward and
835 reverse direction.
836 2. Count insns in chain, going both directions, and check if equal.
837 3. Check that get_last_insn () returns the actual end of chain. */
839 DEBUG_FUNCTION void
840 verify_insn_chain (void)
842 rtx x, prevx, nextx;
843 int insn_cnt1, insn_cnt2;
845 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
846 x != 0;
847 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
848 gcc_assert (PREV_INSN (x) == prevx);
850 gcc_assert (prevx == get_last_insn ());
852 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
853 x != 0;
854 nextx = x, insn_cnt2++, x = PREV_INSN (x))
855 gcc_assert (NEXT_INSN (x) == nextx);
857 gcc_assert (insn_cnt1 == insn_cnt2);
860 /* If we have assembler epilogues, the block falling through to exit must
861 be the last one in the reordered chain when we reach final. Ensure
862 that this condition is met. */
863 static void
864 fixup_fallthru_exit_predecessor (void)
866 edge e;
867 basic_block bb = NULL;
869 /* This transformation is not valid before reload, because we might
870 separate a call from the instruction that copies the return
871 value. */
872 gcc_assert (reload_completed);
874 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
875 if (e)
876 bb = e->src;
878 if (bb && bb->aux)
880 basic_block c = ENTRY_BLOCK_PTR->next_bb;
882 /* If the very first block is the one with the fall-through exit
883 edge, we have to split that block. */
884 if (c == bb)
886 bb = split_block (bb, NULL)->dest;
887 bb->aux = c->aux;
888 c->aux = bb;
889 bb->il.rtl->footer = c->il.rtl->footer;
890 c->il.rtl->footer = NULL;
893 while (c->aux != bb)
894 c = (basic_block) c->aux;
896 c->aux = bb->aux;
897 while (c->aux)
898 c = (basic_block) c->aux;
900 c->aux = bb;
901 bb->aux = NULL;
905 /* In case there are more than one fallthru predecessors of exit, force that
906 there is only one. */
908 static void
909 force_one_exit_fallthru (void)
911 edge e, predecessor = NULL;
912 bool more = false;
913 edge_iterator ei;
914 basic_block forwarder, bb;
916 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
917 if (e->flags & EDGE_FALLTHRU)
919 if (predecessor == NULL)
920 predecessor = e;
921 else
923 more = true;
924 break;
928 if (!more)
929 return;
931 /* Exit has several fallthru predecessors. Create a forwarder block for
932 them. */
933 forwarder = split_edge (predecessor);
934 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
936 if (e->src == forwarder
937 || !(e->flags & EDGE_FALLTHRU))
938 ei_next (&ei);
939 else
940 redirect_edge_and_branch_force (e, forwarder);
943 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
944 exit block. */
945 FOR_EACH_BB (bb)
947 if (bb->aux == NULL && bb != forwarder)
949 bb->aux = forwarder;
950 break;
955 /* Return true in case it is possible to duplicate the basic block BB. */
957 /* We do not want to declare the function in a header file, since it should
958 only be used through the cfghooks interface, and we do not want to move
959 it to cfgrtl.c since it would require also moving quite a lot of related
960 code. */
961 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
963 bool
964 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
966 /* Do not attempt to duplicate tablejumps, as we need to unshare
967 the dispatch table. This is difficult to do, as the instructions
968 computing jump destination may be hoisted outside the basic block. */
969 if (tablejump_p (BB_END (bb), NULL, NULL))
970 return false;
972 /* Do not duplicate blocks containing insns that can't be copied. */
973 if (targetm.cannot_copy_insn_p)
975 rtx insn = BB_HEAD (bb);
976 while (1)
978 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
979 return false;
980 if (insn == BB_END (bb))
981 break;
982 insn = NEXT_INSN (insn);
986 return true;
990 duplicate_insn_chain (rtx from, rtx to)
992 rtx insn, last, copy;
994 /* Avoid updating of boundaries of previous basic block. The
995 note will get removed from insn stream in fixup. */
996 last = emit_note (NOTE_INSN_DELETED);
998 /* Create copy at the end of INSN chain. The chain will
999 be reordered later. */
1000 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
1002 switch (GET_CODE (insn))
1004 case DEBUG_INSN:
1005 /* Don't duplicate label debug insns. */
1006 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
1007 break;
1008 /* FALLTHRU */
1009 case INSN:
1010 case CALL_INSN:
1011 case JUMP_INSN:
1012 /* Avoid copying of dispatch tables. We never duplicate
1013 tablejumps, so this can hit only in case the table got
1014 moved far from original jump. */
1015 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
1016 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1018 /* Avoid copying following barrier as well if any
1019 (and debug insns in between). */
1020 rtx next;
1022 for (next = NEXT_INSN (insn);
1023 next != NEXT_INSN (to);
1024 next = NEXT_INSN (next))
1025 if (!DEBUG_INSN_P (next))
1026 break;
1027 if (next != NEXT_INSN (to) && BARRIER_P (next))
1028 insn = next;
1029 break;
1031 copy = emit_copy_of_insn_after (insn, get_last_insn ());
1032 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
1033 && ANY_RETURN_P (JUMP_LABEL (insn)))
1034 JUMP_LABEL (copy) = JUMP_LABEL (insn);
1035 maybe_copy_prologue_epilogue_insn (insn, copy);
1036 break;
1038 case CODE_LABEL:
1039 break;
1041 case BARRIER:
1042 emit_barrier ();
1043 break;
1045 case NOTE:
1046 switch (NOTE_KIND (insn))
1048 /* In case prologue is empty and function contain label
1049 in first BB, we may want to copy the block. */
1050 case NOTE_INSN_PROLOGUE_END:
1052 case NOTE_INSN_DELETED:
1053 case NOTE_INSN_DELETED_LABEL:
1054 case NOTE_INSN_DELETED_DEBUG_LABEL:
1055 /* No problem to strip these. */
1056 case NOTE_INSN_FUNCTION_BEG:
1057 /* There is always just single entry to function. */
1058 case NOTE_INSN_BASIC_BLOCK:
1059 break;
1061 case NOTE_INSN_EPILOGUE_BEG:
1062 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1063 emit_note_copy (insn);
1064 break;
1066 default:
1067 /* All other notes should have already been eliminated. */
1068 gcc_unreachable ();
1070 break;
1071 default:
1072 gcc_unreachable ();
1075 insn = NEXT_INSN (last);
1076 delete_insn (last);
1077 return insn;
1079 /* Create a duplicate of the basic block BB. */
1081 /* We do not want to declare the function in a header file, since it should
1082 only be used through the cfghooks interface, and we do not want to move
1083 it to cfgrtl.c since it would require also moving quite a lot of related
1084 code. */
1085 extern basic_block cfg_layout_duplicate_bb (basic_block);
1087 basic_block
1088 cfg_layout_duplicate_bb (basic_block bb)
1090 rtx insn;
1091 basic_block new_bb;
1093 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1094 new_bb = create_basic_block (insn,
1095 insn ? get_last_insn () : NULL,
1096 EXIT_BLOCK_PTR->prev_bb);
1098 BB_COPY_PARTITION (new_bb, bb);
1099 if (bb->il.rtl->header)
1101 insn = bb->il.rtl->header;
1102 while (NEXT_INSN (insn))
1103 insn = NEXT_INSN (insn);
1104 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1105 if (insn)
1106 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1109 if (bb->il.rtl->footer)
1111 insn = bb->il.rtl->footer;
1112 while (NEXT_INSN (insn))
1113 insn = NEXT_INSN (insn);
1114 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1115 if (insn)
1116 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1119 return new_bb;
1123 /* Main entry point to this module - initialize the datastructures for
1124 CFG layout changes. It keeps LOOPS up-to-date if not null.
1126 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1128 void
1129 cfg_layout_initialize (unsigned int flags)
1131 rtx x;
1132 basic_block bb;
1134 initialize_original_copy_tables ();
1136 cfg_layout_rtl_register_cfg_hooks ();
1138 record_effective_endpoints ();
1140 /* Make sure that the targets of non local gotos are marked. */
1141 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1143 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1144 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1147 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1150 /* Splits superblocks. */
1151 void
1152 break_superblocks (void)
1154 sbitmap superblocks;
1155 bool need = false;
1156 basic_block bb;
1158 superblocks = sbitmap_alloc (last_basic_block);
1159 sbitmap_zero (superblocks);
1161 FOR_EACH_BB (bb)
1162 if (bb->flags & BB_SUPERBLOCK)
1164 bb->flags &= ~BB_SUPERBLOCK;
1165 SET_BIT (superblocks, bb->index);
1166 need = true;
1169 if (need)
1171 rebuild_jump_labels (get_insns ());
1172 find_many_sub_basic_blocks (superblocks);
1175 free (superblocks);
1178 /* Finalize the changes: reorder insn list according to the sequence specified
1179 by aux pointers, enter compensation code, rebuild scope forest. */
1181 void
1182 cfg_layout_finalize (void)
1184 #ifdef ENABLE_CHECKING
1185 verify_flow_info ();
1186 #endif
1187 force_one_exit_fallthru ();
1188 rtl_register_cfg_hooks ();
1189 if (reload_completed
1190 #ifdef HAVE_epilogue
1191 && !HAVE_epilogue
1192 #endif
1194 fixup_fallthru_exit_predecessor ();
1195 fixup_reorder_chain ();
1197 rebuild_jump_labels (get_insns ());
1198 delete_dead_jumptables ();
1200 #ifdef ENABLE_CHECKING
1201 verify_insn_chain ();
1202 verify_flow_info ();
1203 #endif
1206 /* Checks whether all N blocks in BBS array can be copied. */
1207 bool
1208 can_copy_bbs_p (basic_block *bbs, unsigned n)
1210 unsigned i;
1211 edge e;
1212 int ret = true;
1214 for (i = 0; i < n; i++)
1215 bbs[i]->flags |= BB_DUPLICATED;
1217 for (i = 0; i < n; i++)
1219 /* In case we should redirect abnormal edge during duplication, fail. */
1220 edge_iterator ei;
1221 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1222 if ((e->flags & EDGE_ABNORMAL)
1223 && (e->dest->flags & BB_DUPLICATED))
1225 ret = false;
1226 goto end;
1229 if (!can_duplicate_block_p (bbs[i]))
1231 ret = false;
1232 break;
1236 end:
1237 for (i = 0; i < n; i++)
1238 bbs[i]->flags &= ~BB_DUPLICATED;
1240 return ret;
1243 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1244 are placed into array NEW_BBS in the same order. Edges from basic blocks
1245 in BBS are also duplicated and copies of those of them
1246 that lead into BBS are redirected to appropriate newly created block. The
1247 function assigns bbs into loops (copy of basic block bb is assigned to
1248 bb->loop_father->copy loop, so this must be set up correctly in advance)
1249 and updates dominators locally (LOOPS structure that contains the information
1250 about dominators is passed to enable this).
1252 BASE is the superloop to that basic block belongs; if its header or latch
1253 is copied, we do not set the new blocks as header or latch.
1255 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1256 also in the same order.
1258 Newly created basic blocks are put after the basic block AFTER in the
1259 instruction stream, and the order of the blocks in BBS array is preserved. */
1261 void
1262 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1263 edge *edges, unsigned num_edges, edge *new_edges,
1264 struct loop *base, basic_block after)
1266 unsigned i, j;
1267 basic_block bb, new_bb, dom_bb;
1268 edge e;
1270 /* Duplicate bbs, update dominators, assign bbs to loops. */
1271 for (i = 0; i < n; i++)
1273 /* Duplicate. */
1274 bb = bbs[i];
1275 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1276 after = new_bb;
1277 bb->flags |= BB_DUPLICATED;
1278 /* Possibly set loop header. */
1279 if (bb->loop_father->header == bb && bb->loop_father != base)
1280 new_bb->loop_father->header = new_bb;
1281 /* Or latch. */
1282 if (bb->loop_father->latch == bb && bb->loop_father != base)
1283 new_bb->loop_father->latch = new_bb;
1286 /* Set dominators. */
1287 for (i = 0; i < n; i++)
1289 bb = bbs[i];
1290 new_bb = new_bbs[i];
1292 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1293 if (dom_bb->flags & BB_DUPLICATED)
1295 dom_bb = get_bb_copy (dom_bb);
1296 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1300 /* Redirect edges. */
1301 for (j = 0; j < num_edges; j++)
1302 new_edges[j] = NULL;
1303 for (i = 0; i < n; i++)
1305 edge_iterator ei;
1306 new_bb = new_bbs[i];
1307 bb = bbs[i];
1309 FOR_EACH_EDGE (e, ei, new_bb->succs)
1311 for (j = 0; j < num_edges; j++)
1312 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1313 new_edges[j] = e;
1315 if (!(e->dest->flags & BB_DUPLICATED))
1316 continue;
1317 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1321 /* Clear information about duplicates. */
1322 for (i = 0; i < n; i++)
1323 bbs[i]->flags &= ~BB_DUPLICATED;