* config/rx/rx.c (rx_get_stack_layout): Only save call clobbered
[official-gcc.git] / gcc / cfglayout.c
blobf7d4d10440355a17c69837e80cfde989fd005653
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
42 #include "emit-rtl.h"
/* Holds the interesting trailing notes for the function. */
rtx cfg_layout_function_footer;
/* Likewise, holds the leading notes that appear before the first
   basic block (e.g. parameter setup notes).  */
rtx cfg_layout_function_header;

/* Forward declarations for the static helpers defined below.  */
static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void change_scope (rtx, tree, tree);

/* Exported for use from the debugger (see DEBUG_FUNCTION below).  */
void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (const_rtx);
59 rtx
60 unlink_insn_chain (rtx first, rtx last)
62 rtx prevfirst = PREV_INSN (first);
63 rtx nextlast = NEXT_INSN (last);
65 PREV_INSN (first) = NULL;
66 NEXT_INSN (last) = NULL;
67 if (prevfirst)
68 NEXT_INSN (prevfirst) = nextlast;
69 if (nextlast)
70 PREV_INSN (nextlast) = prevfirst;
71 else
72 set_last_insn (prevfirst);
73 if (!prevfirst)
74 set_first_insn (nextlast);
75 return first;
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers). If there are any such insns,
   we return the last one. Otherwise, we return the end of BB. */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* Never scan into the next block's insns.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  /* Walk forward from BB's end, extending LAST_INSN over trailing
     barriers and jump tables that belong to BB.  */
  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by its jump table also trails BB.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	    continue;
	  default:
	    /* Move the stray note after the trailing insns.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
161 /* Locate or create a label for a given basic block. */
163 static rtx
164 label_for_bb (basic_block bb)
166 rtx label = BB_HEAD (bb);
168 if (!LABEL_P (label))
170 if (dump_file)
171 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
173 label = block_label (bb);
176 return label;
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Leading notes
   go to cfg_layout_function_header, per-block surrounding insns to
   the block's header/footer fields, and trailing insns to
   cfg_layout_function_footer.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip any notes preceding the first basic block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's end and BB's head become
	 BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improve the memory locality in binary search.
   The _locs array contains locators where the given property change.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
/* Locators assigned to the prologue/epilogue insns (0 when unset).  */
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
/* -1 while locators are not initialized; otherwise the locator that
   will be (or was) handed out next by curr_insn_locator.  */
static int curr_rtl_loc = -1;
246 /* Allocate insn locator datastructure. */
247 void
248 insn_locators_alloc (void)
250 prologue_locator = epilogue_locator = 0;
252 block_locators_locs = VEC_alloc (int, heap, 32);
253 block_locators_blocks = VEC_alloc (tree, gc, 32);
254 locations_locators_locs = VEC_alloc (int, heap, 32);
255 locations_locators_vals = VEC_alloc (location_t, heap, 32);
257 last_location = -1;
258 curr_location = -1;
259 curr_block = NULL;
260 last_block = NULL;
261 curr_rtl_loc = 0;
264 /* At the end of emit stage, clear current location. */
265 void
266 insn_locators_finalize (void)
268 if (curr_rtl_loc >= 0)
269 epilogue_locator = curr_insn_locator ();
270 curr_rtl_loc = -1;
273 /* Allocate insn locator datastructure. */
274 void
275 insn_locators_free (void)
277 prologue_locator = epilogue_locator = 0;
279 VEC_free (int, heap, block_locators_locs);
280 VEC_free (tree,gc, block_locators_blocks);
281 VEC_free (int, heap, locations_locators_locs);
282 VEC_free (location_t, heap, locations_locators_vals);
286 /* Set current location. */
287 void
288 set_curr_insn_source_location (location_t location)
290 /* IV opts calls into RTL expansion to compute costs of operations. At this
291 time locators are not initialized. */
292 if (curr_rtl_loc == -1)
293 return;
294 curr_location = location;
297 /* Get current location. */
298 location_t
299 get_curr_insn_source_location (void)
301 return curr_location;
304 /* Set current scope block. */
305 void
306 set_curr_insn_block (tree b)
308 /* IV opts calls into RTL expansion to compute costs of operations. At this
309 time locators are not initialized. */
310 if (curr_rtl_loc == -1)
311 return;
312 if (b)
313 curr_block = b;
316 /* Get current scope block. */
317 tree
318 get_curr_insn_block (void)
320 return curr_block;
/* Return current insn locator.  Lazily pushes a new entry onto the
   block/location locator vectors (bumping the locator) whenever the
   current scope block or source location changed since the previous
   call; returns 0 while locators are uninitialized.  */

int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
/* Pass driver: switch the RTL IL into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
353 static unsigned int
354 outof_cfg_layout_mode (void)
356 basic_block bb;
358 FOR_EACH_BB (bb)
359 if (bb->next_bb != EXIT_BLOCK_PTR)
360 bb->aux = bb->next_bb;
362 cfg_layout_finalize ();
364 return 0;
/* Pass descriptor for entering cfglayout mode; provides the
   PROP_cfglayout IL property.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
/* Pass descriptor for leaving cfglayout mode; destroys the
   PROP_cfglayout IL property.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
405 /* Return scope resulting from combination of S1 and S2. */
406 static tree
407 choose_inner_scope (tree s1, tree s2)
409 if (!s1)
410 return s2;
411 if (!s2)
412 return s1;
413 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
414 return s1;
415 return s2;
/* Emit lexical block notes needed to change scope from S1 to S2,
   inserting them before ORIG_INSN.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk up the scope tree from both S1 and S2 until the nearest
     common ancestor is found; BLOCK_NUMBER increases with nesting
     depth, so always ascend from the deeper side.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Return lexical scope block locator belongs to.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;

  /* Binary search for the last entry with a locator value <= LOC.  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
501 /* Return lexical scope block insn belongs to. */
502 static tree
503 insn_scope (const_rtx insn)
505 return locator_scope (INSN_LOCATOR (insn));
/* Return the source location of the statement specified by the
   locator LOC (binary search over locations_locators_locs for the
   last entry whose locator value is <= LOC).  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
533 /* Return source line of the statement that produced this insn. */
535 locator_line (int loc)
537 expanded_location xloc;
538 if (!loc)
539 return 0;
540 else
541 xloc = expand_location (locator_location (loc));
542 return xloc.line;
545 /* Return line number of the statement that produced this insn. */
547 insn_line (const_rtx insn)
549 return locator_line (INSN_LOCATOR (insn));
552 /* Return source file of the statement specified by LOC. */
553 const char *
554 locator_file (int loc)
556 expanded_location xloc;
557 if (!loc)
558 return 0;
559 else
560 xloc = expand_location (locator_location (loc));
561 return xloc.file;
564 /* Return source file of the statement that produced this insn. */
565 const char *
566 insn_file (const_rtx insn)
568 return locator_file (INSN_LOCATOR (insn));
571 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
572 bool
573 locator_eq (int loc1, int loc2)
575 if (loc1 == loc2)
576 return true;
577 if (locator_location (loc1) != locator_location (loc2))
578 return false;
579 return locator_scope (loc1) == locator_scope (loc2);
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  Walks the
   active insns, emitting the notes needed whenever the scope changes,
   then lets reorder_blocks rebuild the BLOCK tree to match.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Emit a throwaway
     note so there is an insn to close the last open scopes before.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
/* Given a reorder chain, rearrange the code to match.  Works in three
   phases: (1) bulk re-chain the insn stream following the bb->aux
   links, re-attaching each block's header/footer insns; (2) fix up
   jumps and labels so every block's outgoing edges match the new
   physical order, inserting jumps/forwarder blocks where a former
   fallthru is no longer adjacent; (3) at -O0, make sure each edge's
   goto_locus is represented by some insn with that locus.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      /* The jump was inverted; swap the roles of the
			 fallthru and taken edges.  */
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-throu across
	     section boundaries).  */
	  BB_COPY_PARTITION (e_fall->src, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* Is there already an insn with this locus at the end
		 of the source block?  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      /* A locator-less simple jump can just take the locus.  */
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Or does the destination already start with the locus?  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      /* Split the edge and give the new block's insn the locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (e2->goto_locus
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && locator_eq (e->goto_locus, e2->goto_locus))
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

DEBUG_FUNCTION void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  /* Forward walk: each insn's PREV_INSN must be the one we just left.  */
  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  /* Backward walk: likewise for NEXT_INSN.  */
  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  Moves the fallthru predecessor BB to
   the end of the bb->aux chain.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... and re-append it at the end.  */
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one, by creating a forwarder block and redirecting all
   but one of the fallthru edges to it.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* See whether exit has two or more fallthru predecessors.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
1122 /* Return true in case it is possible to duplicate the basic block BB. */
1124 /* We do not want to declare the function in a header file, since it should
1125 only be used through the cfghooks interface, and we do not want to move
1126 it to cfgrtl.c since it would require also moving quite a lot of related
1127 code. */
1128 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1130 bool
1131 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1133 /* Do not attempt to duplicate tablejumps, as we need to unshare
1134 the dispatch table. This is difficult to do, as the instructions
1135 computing jump destination may be hoisted outside the basic block. */
1136 if (tablejump_p (BB_END (bb), NULL, NULL))
1137 return false;
1139 /* Do not duplicate blocks containing insns that can't be copied. */
1140 if (targetm.cannot_copy_insn_p)
1142 rtx insn = BB_HEAD (bb);
1143 while (1)
1145 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1146 return false;
1147 if (insn == BB_END (bb))
1148 break;
1149 insn = NEXT_INSN (insn);
1153 return true;
/* Duplicate the insns FROM through TO (inclusive), appending the
   copies to the end of the insn chain; return the first copied insn.
   Dispatch tables (and their trailing barriers) are skipped rather
   than copied.  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The first real copy follows the placeholder note; drop the note.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
1238 /* Create a duplicate of the basic block BB. */
1240 /* We do not want to declare the function in a header file, since it should
1241 only be used through the cfghooks interface, and we do not want to move
1242 it to cfgrtl.c since it would require also moving quite a lot of related
1243 code. */
1244 extern basic_block cfg_layout_duplicate_bb (basic_block);
1246 basic_block
1247 cfg_layout_duplicate_bb (basic_block bb)
1249 rtx insn;
1250 basic_block new_bb;
1252 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1253 new_bb = create_basic_block (insn,
1254 insn ? get_last_insn () : NULL,
1255 EXIT_BLOCK_PTR->prev_bb);
1257 BB_COPY_PARTITION (new_bb, bb);
1258 if (bb->il.rtl->header)
1260 insn = bb->il.rtl->header;
1261 while (NEXT_INSN (insn))
1262 insn = NEXT_INSN (insn);
1263 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1264 if (insn)
1265 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1268 if (bb->il.rtl->footer)
1270 insn = bb->il.rtl->footer;
1271 while (NEXT_INSN (insn))
1272 insn = NEXT_INSN (insn);
1273 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1274 if (insn)
1275 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1278 return new_bb;
1282 /* Main entry point to this module - initialize the datastructures for
1283 CFG layout changes. It keeps LOOPS up-to-date if not null.
1285 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
1287 void
1288 cfg_layout_initialize (unsigned int flags)
1290 rtx x;
1291 basic_block bb;
1293 initialize_original_copy_tables ();
1295 cfg_layout_rtl_register_cfg_hooks ();
1297 record_effective_endpoints ();
1299 /* Make sure that the targets of non local gotos are marked. */
1300 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1302 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1303 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1306 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1309 /* Splits superblocks. */
1310 void
1311 break_superblocks (void)
1313 sbitmap superblocks;
1314 bool need = false;
1315 basic_block bb;
1317 superblocks = sbitmap_alloc (last_basic_block);
1318 sbitmap_zero (superblocks);
1320 FOR_EACH_BB (bb)
1321 if (bb->flags & BB_SUPERBLOCK)
1323 bb->flags &= ~BB_SUPERBLOCK;
1324 SET_BIT (superblocks, bb->index);
1325 need = true;
1328 if (need)
1330 rebuild_jump_labels (get_insns ());
1331 find_many_sub_basic_blocks (superblocks);
1334 free (superblocks);
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Ensure the exit block has at most one fallthru predecessor before we
     leave cfglayout mode.  */
  force_one_exit_fallthru ();
  /* Switch back from the cfglayout hooks to the plain RTL cfg hooks.  */
  rtl_register_cfg_hooks ();
  /* After reload, when no epilogue will be emitted, the block falling
     through to the exit must be placed last explicitly.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Materialize the layout order chosen via bb->aux into the insn chain,
     inserting compensation jumps where needed.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1365 /* Checks whether all N blocks in BBS array can be copied. */
1366 bool
1367 can_copy_bbs_p (basic_block *bbs, unsigned n)
1369 unsigned i;
1370 edge e;
1371 int ret = true;
1373 for (i = 0; i < n; i++)
1374 bbs[i]->flags |= BB_DUPLICATED;
1376 for (i = 0; i < n; i++)
1378 /* In case we should redirect abnormal edge during duplication, fail. */
1379 edge_iterator ei;
1380 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1381 if ((e->flags & EDGE_ABNORMAL)
1382 && (e->dest->flags & BB_DUPLICATED))
1384 ret = false;
1385 goto end;
1388 if (!can_duplicate_block_p (bbs[i]))
1390 ret = false;
1391 break;
1395 end:
1396 for (i = 0; i < n; i++)
1397 bbs[i]->flags &= ~BB_DUPLICATED;
1399 return ret;
1402 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1403 are placed into array NEW_BBS in the same order. Edges from basic blocks
1404 in BBS are also duplicated and copies of those of them
1405 that lead into BBS are redirected to appropriate newly created block. The
1406 function assigns bbs into loops (copy of basic block bb is assigned to
1407 bb->loop_father->copy loop, so this must be set up correctly in advance)
1408 and updates dominators locally (LOOPS structure that contains the information
1409 about dominators is passed to enable this).
1411 BASE is the superloop to that basic block belongs; if its header or latch
1412 is copied, we do not set the new blocks as header or latch.
1414 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1415 also in the same order.
1417 Newly created basic blocks are put after the basic block AFTER in the
1418 instruction stream, and the order of the blocks in BBS array is preserved. */
1420 void
1421 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1422 edge *edges, unsigned num_edges, edge *new_edges,
1423 struct loop *base, basic_block after)
1425 unsigned i, j;
1426 basic_block bb, new_bb, dom_bb;
1427 edge e;
1429 /* Duplicate bbs, update dominators, assign bbs to loops. */
1430 for (i = 0; i < n; i++)
1432 /* Duplicate. */
1433 bb = bbs[i];
1434 new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
1435 after = new_bb;
1436 bb->flags |= BB_DUPLICATED;
1437 /* Possibly set loop header. */
1438 if (bb->loop_father->header == bb && bb->loop_father != base)
1439 new_bb->loop_father->header = new_bb;
1440 /* Or latch. */
1441 if (bb->loop_father->latch == bb && bb->loop_father != base)
1442 new_bb->loop_father->latch = new_bb;
1445 /* Set dominators. */
1446 for (i = 0; i < n; i++)
1448 bb = bbs[i];
1449 new_bb = new_bbs[i];
1451 dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
1452 if (dom_bb->flags & BB_DUPLICATED)
1454 dom_bb = get_bb_copy (dom_bb);
1455 set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
1459 /* Redirect edges. */
1460 for (j = 0; j < num_edges; j++)
1461 new_edges[j] = NULL;
1462 for (i = 0; i < n; i++)
1464 edge_iterator ei;
1465 new_bb = new_bbs[i];
1466 bb = bbs[i];
1468 FOR_EACH_EDGE (e, ei, new_bb->succs)
1470 for (j = 0; j < num_edges; j++)
1471 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1472 new_edges[j] = e;
1474 if (!(e->dest->flags & BB_DUPLICATED))
1475 continue;
1476 redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
1480 /* Clear information about duplicates. */
1481 for (i = 0; i < n; i++)
1482 bbs[i]->flags &= ~BB_DUPLICATED;
1485 #include "gt-cfglayout.h"