[official-gcc.git] / gcc / cfgrtl.c
blob 3e4b65ec95b93b097d08be74006790da4f58f8c8
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
 22 /* This file contains low-level functions to manipulate and analyze the CFG
 23    that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
 36 Functions not intended for generic use:
 37 - Infrastructure to quickly determine the basic block for an insn
 38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
 39 - Edge redirection with updating and optimization of the insn chain
 40 block_label, tidy_fallthru_edge, force_nonfallthru */
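/* Illustrative sketch of the API above: a typical client deletes a control
   flow insn and then lets the CFG catch up by purging edges that can no
   longer be taken.  Here `bb' is assumed to be a valid basic_block obtained
   elsewhere, e.g. via FOR_EACH_BB.

     rtx jump = BB_END (bb);
     if (JUMP_P (jump) && onlyjump_p (jump))
       {
         delete_insn (jump);
         purge_dead_edges (bb);
       }

   Passes that rewrite many blocks typically finish with a single call to
   purge_all_dead_edges () instead of purging block by block.  */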
42 #include "config.h"
43 #include "system.h"
44 #include "coretypes.h"
45 #include "tm.h"
46 #include "tree.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "function.h"
52 #include "except.h"
53 #include "rtl-error.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-attr.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
59 #include "expr.h"
60 #include "target.h"
61 #include "common/common-target.h"
62 #include "cfgloop.h"
63 #include "ggc.h"
64 #include "tree-pass.h"
65 #include "df.h"
67 static int can_delete_note_p (const_rtx);
68 static int can_delete_label_p (const_rtx);
69 static basic_block rtl_split_edge (edge);
70 static bool rtl_move_block_after (basic_block, basic_block);
71 static int rtl_verify_flow_info (void);
72 static basic_block cfg_layout_split_block (basic_block, void *);
73 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
74 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
75 static void cfg_layout_delete_block (basic_block);
76 static void rtl_delete_block (basic_block);
77 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
78 static edge rtl_redirect_edge_and_branch (edge, basic_block);
79 static basic_block rtl_split_block (basic_block, void *);
80 static void rtl_dump_bb (basic_block, FILE *, int, int);
81 static int rtl_verify_flow_info_1 (void);
82 static void rtl_make_forwarder_block (edge);
84 /* Return true if NOTE is not one of the ones that must be kept paired,
85 so that we may simply delete it. */
87 static int
88 can_delete_note_p (const_rtx note)
90 switch (NOTE_KIND (note))
92 case NOTE_INSN_DELETED:
93 case NOTE_INSN_BASIC_BLOCK:
94 case NOTE_INSN_EPILOGUE_BEG:
95 return true;
97 default:
98 return false;
102 /* True if a given label can be deleted. */
104 static int
105 can_delete_label_p (const_rtx label)
107 return (!LABEL_PRESERVE_P (label)
108 /* User declared labels must be preserved. */
109 && LABEL_NAME (label) == 0
110 && !in_expr_list_p (forced_labels, label));
113 /* Delete INSN by patching it out. */
115 void
116 delete_insn (rtx insn)
118 rtx note;
119 bool really_delete = true;
121 if (LABEL_P (insn))
123 /* Some labels can't be directly removed from the INSN chain, as they
 124 might be referenced via variables, the constant pool, etc.
125 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
126 if (! can_delete_label_p (insn))
128 const char *name = LABEL_NAME (insn);
129 basic_block bb = BLOCK_FOR_INSN (insn);
130 rtx bb_note = NEXT_INSN (insn);
132 really_delete = false;
133 PUT_CODE (insn, NOTE);
134 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
135 NOTE_DELETED_LABEL_NAME (insn) = name;
137 if (bb_note != NULL_RTX && NOTE_INSN_BASIC_BLOCK_P (bb_note)
138 && BLOCK_FOR_INSN (bb_note) == bb)
140 reorder_insns_nobb (insn, insn, bb_note);
141 BB_HEAD (bb) = bb_note;
142 if (BB_END (bb) == bb_note)
143 BB_END (bb) = insn;
147 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
150 if (really_delete)
152 /* If this insn has already been deleted, something is very wrong. */
153 gcc_assert (!INSN_DELETED_P (insn));
154 remove_insn (insn);
155 INSN_DELETED_P (insn) = 1;
158 /* If deleting a jump, decrement the use count of the label. Deleting
159 the label itself should happen in the normal course of block merging. */
160 if (JUMP_P (insn))
162 if (JUMP_LABEL (insn)
163 && LABEL_P (JUMP_LABEL (insn)))
164 LABEL_NUSES (JUMP_LABEL (insn))--;
166 /* If there are more targets, remove them too. */
167 while ((note
168 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
169 && LABEL_P (XEXP (note, 0)))
171 LABEL_NUSES (XEXP (note, 0))--;
172 remove_note (insn, note);
176 /* Also if deleting any insn that references a label as an operand. */
177 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
178 && LABEL_P (XEXP (note, 0)))
180 LABEL_NUSES (XEXP (note, 0))--;
181 remove_note (insn, note);
184 if (JUMP_TABLE_DATA_P (insn))
186 rtx pat = PATTERN (insn);
187 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
188 int len = XVECLEN (pat, diff_vec_p);
189 int i;
191 for (i = 0; i < len; i++)
193 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
195 /* When deleting code in bulk (e.g. removing many unreachable
196 blocks) we can delete a label that's a target of the vector
197 before deleting the vector itself. */
198 if (!NOTE_P (label))
199 LABEL_NUSES (label)--;
204 /* Like delete_insn but also purge dead edges from BB. */
206 void
207 delete_insn_and_edges (rtx insn)
209 bool purge = false;
211 if (INSN_P (insn)
212 && BLOCK_FOR_INSN (insn)
213 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
214 purge = true;
215 delete_insn (insn);
216 if (purge)
217 purge_dead_edges (BLOCK_FOR_INSN (insn));
220 /* Unlink a chain of insns between START and FINISH, leaving notes
221 that must be paired. If CLEAR_BB is true, we set bb field for
222 insns that cannot be removed to NULL. */
224 void
225 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
227 rtx prev, current;
229 /* Unchain the insns one by one. It would be quicker to delete all of these
230 with a single unchaining, rather than one at a time, but we need to keep
231 the NOTE's. */
232 current = finish;
233 while (1)
235 prev = PREV_INSN (current);
236 if (NOTE_P (current) && !can_delete_note_p (current))
238 else
239 delete_insn (current);
241 if (clear_bb && !INSN_DELETED_P (current))
242 set_block_for_insn (current, NULL);
244 if (current == start)
245 break;
246 current = prev;
250 /* Create a new basic block consisting of the instructions between HEAD and END
 251 inclusive.  This function is designed to allow fast BB construction - it reuses
 252 the note and basic block struct in BB_NOTE, if any, does not grow the
 253 BASIC_BLOCK chain, and should be used directly only by CFG construction code.
 254 END can be NULL to create a new empty basic block before HEAD.  Both END
 255 and HEAD can be NULL to create a basic block at the end of the INSN chain.
 256 AFTER is the basic block the new block should be put after. */
258 basic_block
259 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
261 basic_block bb;
263 if (bb_note
264 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
265 && bb->aux == NULL)
267 /* If we found an existing note, thread it back onto the chain. */
269 rtx after;
271 if (LABEL_P (head))
272 after = head;
273 else
275 after = PREV_INSN (head);
276 head = bb_note;
279 if (after != bb_note && NEXT_INSN (after) != bb_note)
280 reorder_insns_nobb (bb_note, bb_note, after);
282 else
284 /* Otherwise we must create a note and a basic block structure. */
286 bb = alloc_block ();
288 init_rtl_bb_info (bb);
289 if (!head && !end)
290 head = end = bb_note
291 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
292 else if (LABEL_P (head) && end)
294 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
295 if (head == end)
296 end = bb_note;
298 else
300 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
301 head = bb_note;
302 if (!end)
303 end = head;
306 NOTE_BASIC_BLOCK (bb_note) = bb;
309 /* Always include the bb note in the block. */
310 if (NEXT_INSN (end) == bb_note)
311 end = bb_note;
313 BB_HEAD (bb) = head;
314 BB_END (bb) = end;
315 bb->index = last_basic_block++;
316 bb->flags = BB_NEW | BB_RTL;
317 link_block (bb, after);
318 SET_BASIC_BLOCK (bb->index, bb);
319 df_bb_refs_record (bb->index, false);
320 update_bb_for_insn (bb);
321 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
323 /* Tag the block so that we know it has been used when considering
324 other basic block notes. */
325 bb->aux = bb;
327 return bb;
 330 /* Create a new basic block consisting of the instructions between HEAD and END
 331 and place it in the BB chain after block AFTER.  END can be NULL to
 332 create a new empty basic block before HEAD.  Both END and HEAD can be
 333 NULL to create a basic block at the end of the INSN chain. */
335 static basic_block
336 rtl_create_basic_block (void *headp, void *endp, basic_block after)
338 rtx head = (rtx) headp, end = (rtx) endp;
339 basic_block bb;
341 /* Grow the basic block array if needed. */
342 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
344 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
345 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
348 n_basic_blocks++;
350 bb = create_basic_block_structure (head, end, NULL, after);
351 bb->aux = NULL;
352 return bb;
355 static basic_block
356 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
358 basic_block newbb = rtl_create_basic_block (head, end, after);
360 return newbb;
363 /* Delete the insns in a (non-live) block. We physically delete every
364 non-deleted-note insn, and update the flow graph appropriately.
366 Return nonzero if we deleted an exception handler. */
368 /* ??? Preserving all such notes strikes me as wrong. It would be nice
369 to post-process the stream to remove empty blocks, loops, ranges, etc. */
371 static void
372 rtl_delete_block (basic_block b)
374 rtx insn, end;
376 /* If the head of this block is a CODE_LABEL, then it might be the
377 label for an exception handler which can't be reached. We need
378 to remove the label from the exception_handler_label list. */
379 insn = BB_HEAD (b);
381 end = get_last_bb_insn (b);
383 /* Selectively delete the entire chain. */
384 BB_HEAD (b) = NULL;
385 delete_insn_chain (insn, end, true);
388 if (dump_file)
389 fprintf (dump_file, "deleting block %d\n", b->index);
390 df_bb_delete (b->index);
393 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
395 void
396 compute_bb_for_insn (void)
398 basic_block bb;
400 FOR_EACH_BB (bb)
402 rtx end = BB_END (bb);
403 rtx insn;
405 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
407 BLOCK_FOR_INSN (insn) = bb;
408 if (insn == end)
409 break;
414 /* Release the basic_block_for_insn array. */
416 unsigned int
417 free_bb_for_insn (void)
419 rtx insn;
420 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
421 if (!BARRIER_P (insn))
422 BLOCK_FOR_INSN (insn) = NULL;
423 return 0;
426 static unsigned int
427 rest_of_pass_free_cfg (void)
429 #ifdef DELAY_SLOTS
430 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
431 valid at that point so it would be too late to call df_analyze. */
432 if (optimize > 0 && flag_delayed_branch)
434 df_note_add_problem ();
435 df_analyze ();
437 #endif
439 free_bb_for_insn ();
440 return 0;
443 struct rtl_opt_pass pass_free_cfg =
446 RTL_PASS,
447 "*free_cfg", /* name */
448 NULL, /* gate */
449 rest_of_pass_free_cfg, /* execute */
450 NULL, /* sub */
451 NULL, /* next */
452 0, /* static_pass_number */
453 TV_NONE, /* tv_id */
454 0, /* properties_required */
455 0, /* properties_provided */
456 PROP_cfg, /* properties_destroyed */
457 0, /* todo_flags_start */
458 0, /* todo_flags_finish */
 462 /* Return the RTX after which we want to emit code at the entry of the function. */
 463 rtx
 464 entry_of_function (void)
466 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
467 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
470 /* Emit INSN at the entry point of the function, ensuring that it is only
471 executed once per function. */
472 void
473 emit_insn_at_entry (rtx insn)
475 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
476 edge e = ei_safe_edge (ei);
477 gcc_assert (e->flags & EDGE_FALLTHRU);
479 insert_insn_on_edge (insn, e);
480 commit_edge_insertions ();
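/* Illustrative sketch: to run a small piece of code exactly once at function
   entry, a caller can build the insns in a sequence and hand them to
   emit_insn_at_entry.  `reg' is assumed to be a pseudo created by the caller.

     rtx seq;
     start_sequence ();
     emit_move_insn (reg, const0_rtx);
     seq = get_insns ();
     end_sequence ();
     emit_insn_at_entry (seq);  */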
483 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
484 (or BARRIER if found) and notify df of the bb change.
485 The insn chain range is inclusive
 486 (i.e. both BEGIN and END will be updated). */
488 static void
489 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
491 rtx insn;
493 end = NEXT_INSN (end);
494 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
495 if (!BARRIER_P (insn))
496 df_insn_change_bb (insn, bb);
499 /* Update BLOCK_FOR_INSN of insns in BB to BB,
500 and notify df of the change. */
502 void
503 update_bb_for_insn (basic_block bb)
505 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
509 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
 510 rtx
 511 bb_note (basic_block bb)
513 rtx note;
515 note = BB_HEAD (bb);
516 if (LABEL_P (note))
517 note = NEXT_INSN (note);
519 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
520 return note;
523 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
524 note associated with the BLOCK. */
526 static rtx
527 first_insn_after_basic_block_note (basic_block block)
529 rtx insn;
531 /* Get the first instruction in the block. */
532 insn = BB_HEAD (block);
534 if (insn == NULL_RTX)
535 return NULL_RTX;
536 if (LABEL_P (insn))
537 insn = NEXT_INSN (insn);
538 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
540 return NEXT_INSN (insn);
543 /* Creates a new basic block just after basic block B by splitting
544 everything after specified instruction I. */
546 static basic_block
547 rtl_split_block (basic_block bb, void *insnp)
549 basic_block new_bb;
550 rtx insn = (rtx) insnp;
551 edge e;
552 edge_iterator ei;
554 if (!insn)
556 insn = first_insn_after_basic_block_note (bb);
558 if (insn)
560 rtx next = insn;
562 insn = PREV_INSN (insn);
564 /* If the block contains only debug insns, insn would have
565 been NULL in a non-debug compilation, and then we'd end
566 up emitting a DELETED note. For -fcompare-debug
567 stability, emit the note too. */
568 if (insn != BB_END (bb)
569 && DEBUG_INSN_P (next)
570 && DEBUG_INSN_P (BB_END (bb)))
572 while (next != BB_END (bb) && DEBUG_INSN_P (next))
573 next = NEXT_INSN (next);
575 if (next == BB_END (bb))
576 emit_note_after (NOTE_INSN_DELETED, next);
579 else
580 insn = get_last_insn ();
 583 /* We should probably check the type of the insn so that we do not create
 584 an inconsistent CFG.  It is checked in verify_flow_info anyway, so do not
 585 bother. */
586 if (insn == BB_END (bb))
587 emit_note_after (NOTE_INSN_DELETED, insn);
589 /* Create the new basic block. */
590 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
591 BB_COPY_PARTITION (new_bb, bb);
592 BB_END (bb) = insn;
594 /* Redirect the outgoing edges. */
595 new_bb->succs = bb->succs;
596 bb->succs = NULL;
597 FOR_EACH_EDGE (e, ei, new_bb->succs)
598 e->src = new_bb;
600 /* The new block starts off being dirty. */
601 df_set_bb_dirty (bb);
602 return new_bb;
605 /* Return true if the single edge between blocks A and B is the only place
606 in RTL which holds some unique locus. */
608 static bool
609 unique_locus_on_edge_between_p (basic_block a, basic_block b)
611 const int goto_locus = EDGE_SUCC (a, 0)->goto_locus;
612 rtx insn, end;
614 if (!goto_locus)
615 return false;
617 /* First scan block A backward. */
618 insn = BB_END (a);
619 end = PREV_INSN (BB_HEAD (a));
620 while (insn != end && (!NONDEBUG_INSN_P (insn) || INSN_LOCATOR (insn) == 0))
621 insn = PREV_INSN (insn);
623 if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
624 return false;
626 /* Then scan block B forward. */
627 insn = BB_HEAD (b);
628 if (insn)
630 end = NEXT_INSN (BB_END (b));
631 while (insn != end && !NONDEBUG_INSN_P (insn))
632 insn = NEXT_INSN (insn);
634 if (insn != end && INSN_LOCATOR (insn) != 0
635 && locator_eq (INSN_LOCATOR (insn), goto_locus))
636 return false;
639 return true;
642 /* If the single edge between blocks A and B is the only place in RTL which
643 holds some unique locus, emit a nop with that locus between the blocks. */
645 static void
646 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
648 if (!unique_locus_on_edge_between_p (a, b))
649 return;
651 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
652 INSN_LOCATOR (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
655 /* Blocks A and B are to be merged into a single block A. The insns
656 are already contiguous. */
658 static void
659 rtl_merge_blocks (basic_block a, basic_block b)
661 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
662 rtx del_first = NULL_RTX, del_last = NULL_RTX;
663 rtx b_debug_start = b_end, b_debug_end = b_end;
664 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
665 int b_empty = 0;
667 if (dump_file)
668 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
669 a->index);
671 while (DEBUG_INSN_P (b_end))
672 b_end = PREV_INSN (b_debug_start = b_end);
674 /* If there was a CODE_LABEL beginning B, delete it. */
675 if (LABEL_P (b_head))
677 /* Detect basic blocks with nothing but a label. This can happen
678 in particular at the end of a function. */
679 if (b_head == b_end)
680 b_empty = 1;
682 del_first = del_last = b_head;
683 b_head = NEXT_INSN (b_head);
686 /* Delete the basic block note and handle blocks containing just that
687 note. */
688 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
690 if (b_head == b_end)
691 b_empty = 1;
692 if (! del_last)
693 del_first = b_head;
695 del_last = b_head;
696 b_head = NEXT_INSN (b_head);
699 /* If there was a jump out of A, delete it. */
700 if (JUMP_P (a_end))
702 rtx prev;
704 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
705 if (!NOTE_P (prev)
706 || NOTE_INSN_BASIC_BLOCK_P (prev)
707 || prev == BB_HEAD (a))
708 break;
710 del_first = a_end;
712 #ifdef HAVE_cc0
713 /* If this was a conditional jump, we need to also delete
714 the insn that set cc0. */
715 if (only_sets_cc0_p (prev))
717 rtx tmp = prev;
719 prev = prev_nonnote_insn (prev);
720 if (!prev)
721 prev = BB_HEAD (a);
722 del_first = tmp;
724 #endif
726 a_end = PREV_INSN (del_first);
728 else if (BARRIER_P (NEXT_INSN (a_end)))
729 del_first = NEXT_INSN (a_end);
731 /* Delete everything marked above as well as crap that might be
732 hanging out between the two blocks. */
733 BB_END (a) = a_end;
734 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
735 delete_insn_chain (del_first, del_last, true);
737 /* When not optimizing CFG and the edge is the only place in RTL which holds
738 some unique locus, emit a nop with that locus in between. */
739 if (!optimize)
741 emit_nop_for_unique_locus_between (a, b);
742 a_end = BB_END (a);
745 /* Reassociate the insns of B with A. */
746 if (!b_empty)
748 update_bb_for_insn_chain (a_end, b_debug_end, a);
750 BB_END (a) = b_debug_end;
751 BB_HEAD (b) = NULL_RTX;
753 else if (b_end != b_debug_end)
755 /* Move any deleted labels and other notes between the end of A
756 and the debug insns that make up B after the debug insns,
757 bringing the debug insns into A while keeping the notes after
758 the end of A. */
759 if (NEXT_INSN (a_end) != b_debug_start)
760 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
761 b_debug_end);
762 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
763 BB_END (a) = b_debug_end;
766 df_bb_delete (b->index);
768 /* If B was a forwarder block, propagate the locus on the edge. */
769 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
770 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
772 if (dump_file)
773 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
 777 /* Return true when blocks A and B can be merged. */
779 static bool
780 rtl_can_merge_blocks (basic_block a, basic_block b)
782 /* If we are partitioning hot/cold basic blocks, we don't want to
783 mess up unconditional or indirect jumps that cross between hot
784 and cold sections.
786 Basic block partitioning may result in some jumps that appear to
787 be optimizable (or blocks that appear to be mergeable), but which really
788 must be left untouched (they are required to make it safely across
789 partition boundaries). See the comments at the top of
790 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
792 if (BB_PARTITION (a) != BB_PARTITION (b))
793 return false;
795 /* Protect the loop latches. */
796 if (current_loops && b->loop_father->latch == b)
797 return false;
799 /* There must be exactly one edge in between the blocks. */
800 return (single_succ_p (a)
801 && single_succ (a) == b
802 && single_pred_p (b)
803 && a != b
804 /* Must be simple edge. */
805 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
806 && a->next_bb == b
807 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
808 /* If the jump insn has side effects,
809 we can't kill the edge. */
810 && (!JUMP_P (BB_END (a))
811 || (reload_completed
812 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
815 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
816 exist. */
 818 rtx
 819 block_label (basic_block block)
821 if (block == EXIT_BLOCK_PTR)
822 return NULL_RTX;
824 if (!LABEL_P (BB_HEAD (block)))
826 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
829 return BB_HEAD (block);
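/* Illustrative sketch: block_label is how callers obtain a jump target for a
   block, as in the jump replacement performed later in this file.  `target',
   `src' and `insn' are assumed to come from the caller, `insn' being the jump
   at BB_END (src) that is being replaced.

     rtx target_label = block_label (target);
     emit_jump_insn_after_noloc (gen_jump (target_label), insn);
     JUMP_LABEL (BB_END (src)) = target_label;
     LABEL_NUSES (target_label)++;  */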
 832 /* Attempt to perform edge redirection by replacing a possibly complex jump
 833 instruction with an unconditional jump, or by removing the jump completely.
 834 This can apply only if all edges now point to the same block.  The parameters
 835 and return values are equivalent to redirect_edge_and_branch. */
837 edge
838 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
840 basic_block src = e->src;
841 rtx insn = BB_END (src), kill_from;
842 rtx set;
843 int fallthru = 0;
845 /* If we are partitioning hot/cold basic blocks, we don't want to
846 mess up unconditional or indirect jumps that cross between hot
847 and cold sections.
849 Basic block partitioning may result in some jumps that appear to
850 be optimizable (or blocks that appear to be mergeable), but which really
851 must be left untouched (they are required to make it safely across
852 partition boundaries). See the comments at the top of
853 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
855 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
856 || BB_PARTITION (src) != BB_PARTITION (target))
857 return NULL;
859 /* We can replace or remove a complex jump only when we have exactly
860 two edges. Also, if we have exactly one outgoing edge, we can
861 redirect that. */
862 if (EDGE_COUNT (src->succs) >= 3
863 /* Verify that all targets will be TARGET. Specifically, the
864 edge that is not E must also go to TARGET. */
865 || (EDGE_COUNT (src->succs) == 2
866 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
867 return NULL;
869 if (!onlyjump_p (insn))
870 return NULL;
871 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
872 return NULL;
874 /* Avoid removing branch with side effects. */
875 set = single_set (insn);
876 if (!set || side_effects_p (set))
877 return NULL;
879 /* In case we zap a conditional jump, we'll need to kill
880 the cc0 setter too. */
881 kill_from = insn;
882 #ifdef HAVE_cc0
883 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
884 && only_sets_cc0_p (PREV_INSN (insn)))
885 kill_from = PREV_INSN (insn);
886 #endif
888 /* See if we can create the fallthru edge. */
889 if (in_cfglayout || can_fallthru (src, target))
891 if (dump_file)
892 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
893 fallthru = 1;
895 /* Selectively unlink whole insn chain. */
896 if (in_cfglayout)
898 rtx insn = BB_FOOTER (src);
900 delete_insn_chain (kill_from, BB_END (src), false);
902 /* Remove barriers but keep jumptables. */
903 while (insn)
905 if (BARRIER_P (insn))
907 if (PREV_INSN (insn))
908 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
909 else
910 BB_FOOTER (src) = NEXT_INSN (insn);
911 if (NEXT_INSN (insn))
912 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
914 if (LABEL_P (insn))
915 break;
916 insn = NEXT_INSN (insn);
919 else
920 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
921 false);
924 /* If this already is simplejump, redirect it. */
925 else if (simplejump_p (insn))
927 if (e->dest == target)
928 return NULL;
929 if (dump_file)
930 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
931 INSN_UID (insn), e->dest->index, target->index);
932 if (!redirect_jump (insn, block_label (target), 0))
934 gcc_assert (target == EXIT_BLOCK_PTR);
935 return NULL;
939 /* Cannot do anything for target exit block. */
940 else if (target == EXIT_BLOCK_PTR)
941 return NULL;
943 /* Or replace possibly complicated jump insn by simple jump insn. */
944 else
946 rtx target_label = block_label (target);
947 rtx barrier, label, table;
949 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
950 JUMP_LABEL (BB_END (src)) = target_label;
951 LABEL_NUSES (target_label)++;
952 if (dump_file)
953 fprintf (dump_file, "Replacing insn %i by jump %i\n",
954 INSN_UID (insn), INSN_UID (BB_END (src)));
957 delete_insn_chain (kill_from, insn, false);
959 /* Recognize a tablejump that we are converting to a
960 simple jump and remove its associated CODE_LABEL
961 and ADDR_VEC or ADDR_DIFF_VEC. */
962 if (tablejump_p (insn, &label, &table))
963 delete_insn_chain (label, table, false);
965 barrier = next_nonnote_insn (BB_END (src));
966 if (!barrier || !BARRIER_P (barrier))
967 emit_barrier_after (BB_END (src));
968 else
970 if (barrier != NEXT_INSN (BB_END (src)))
 972 /* Move the jump before the barrier so that the notes
 973 which originally were or were created before the jump table are
 974 inside the basic block. */
975 rtx new_insn = BB_END (src);
977 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
978 PREV_INSN (barrier), src);
980 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
981 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
983 NEXT_INSN (new_insn) = barrier;
984 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
986 PREV_INSN (new_insn) = PREV_INSN (barrier);
987 PREV_INSN (barrier) = new_insn;
992 /* Keep only one edge out and set proper flags. */
993 if (!single_succ_p (src))
994 remove_edge (e);
995 gcc_assert (single_succ_p (src));
997 e = single_succ_edge (src);
998 if (fallthru)
999 e->flags = EDGE_FALLTHRU;
1000 else
1001 e->flags = 0;
1003 e->probability = REG_BR_PROB_BASE;
1004 e->count = src->count;
1006 if (e->dest != target)
1007 redirect_edge_succ (e, target);
1008 return e;
 1011 /* Subroutine of redirect_branch_edge that tries to patch the jump
 1012 instruction INSN so that it reaches block NEW_BB.  Do this
 1013 only when it originally reached the block containing OLD_LABEL.  Return true
 1014 if this worked or the original target wasn't OLD_LABEL; return false if
 1015 redirection doesn't work. */
1017 static bool
1018 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1020 rtx tmp;
1021 /* Recognize a tablejump and adjust all matching cases. */
1022 if (tablejump_p (insn, NULL, &tmp))
1024 rtvec vec;
1025 int j;
1026 rtx new_label = block_label (new_bb);
1028 if (new_bb == EXIT_BLOCK_PTR)
1029 return false;
1030 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1031 vec = XVEC (PATTERN (tmp), 0);
1032 else
1033 vec = XVEC (PATTERN (tmp), 1);
1035 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1036 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1038 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1039 --LABEL_NUSES (old_label);
1040 ++LABEL_NUSES (new_label);
1043 /* Handle casesi dispatch insns. */
1044 if ((tmp = single_set (insn)) != NULL
1045 && SET_DEST (tmp) == pc_rtx
1046 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1047 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1048 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1050 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1051 new_label);
1052 --LABEL_NUSES (old_label);
1053 ++LABEL_NUSES (new_label);
1056 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1058 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1059 rtx new_label, note;
1061 if (new_bb == EXIT_BLOCK_PTR)
1062 return false;
1063 new_label = block_label (new_bb);
1065 for (i = 0; i < n; ++i)
1067 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1068 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1069 if (XEXP (old_ref, 0) == old_label)
1071 ASM_OPERANDS_LABEL (tmp, i)
1072 = gen_rtx_LABEL_REF (Pmode, new_label);
1073 --LABEL_NUSES (old_label);
1074 ++LABEL_NUSES (new_label);
1078 if (JUMP_LABEL (insn) == old_label)
1080 JUMP_LABEL (insn) = new_label;
1081 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1082 if (note)
1083 remove_note (insn, note);
1085 else
1087 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1088 if (note)
1089 remove_note (insn, note);
1090 if (JUMP_LABEL (insn) != new_label
1091 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1092 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1094 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1095 != NULL_RTX)
1096 XEXP (note, 0) = new_label;
1098 else
1100 /* ?? We may play the games with moving the named labels from
1101 one basic block to the other in case only one computed_jump is
1102 available. */
1103 if (computed_jump_p (insn)
1104 /* A return instruction can't be redirected. */
1105 || returnjump_p (insn))
1106 return false;
1108 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1110 /* If the insn doesn't go where we think, we're confused. */
1111 gcc_assert (JUMP_LABEL (insn) == old_label);
1113 /* If the substitution doesn't succeed, die. This can happen
1114 if the back end emitted unrecognizable instructions or if
1115 target is exit block on some arches. */
1116 if (!redirect_jump (insn, block_label (new_bb), 0))
1118 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1119 return false;
1123 return true;
 1127 /* Redirect the edge representing a branch of an (un)conditional jump or
 1128 tablejump; return NULL on failure. */
1129 static edge
1130 redirect_branch_edge (edge e, basic_block target)
1132 rtx old_label = BB_HEAD (e->dest);
1133 basic_block src = e->src;
1134 rtx insn = BB_END (src);
1136 /* We can only redirect non-fallthru edges of jump insn. */
1137 if (e->flags & EDGE_FALLTHRU)
1138 return NULL;
1139 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1140 return NULL;
1142 if (!currently_expanding_to_rtl)
1144 if (!patch_jump_insn (insn, old_label, target))
1145 return NULL;
1147 else
1148 /* When expanding this BB might actually contain multiple
1149 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1150 Redirect all of those that match our label. */
1151 FOR_BB_INSNS (src, insn)
1152 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1153 return NULL;
1155 if (dump_file)
1156 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1157 e->src->index, e->dest->index, target->index);
1159 if (e->dest != target)
1160 e = redirect_edge_succ_nodup (e, target);
1162 return e;
 1165 /* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
 1166 expense of adding new instructions or reordering basic blocks.
 1168 The function can also be called with the edge destination equal to TARGET.
 1169 In that case it should try the simplifications and do nothing if none is possible.
 1171 Return the edge representing the branch if the transformation succeeded.  Return NULL
 1172 on failure.
 1173 We still return NULL in case E already pointed to TARGET and we didn't
 1174 manage to simplify the instruction stream. */
1176 static edge
1177 rtl_redirect_edge_and_branch (edge e, basic_block target)
1179 edge ret;
1180 basic_block src = e->src;
1182 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1183 return NULL;
1185 if (e->dest == target)
1186 return e;
1188 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1190 df_set_bb_dirty (src);
1191 return ret;
1194 ret = redirect_branch_edge (e, target);
1195 if (!ret)
1196 return NULL;
1198 df_set_bb_dirty (src);
1199 return ret;
 1202 /* Like force_nonfallthru below, but additionally performs redirection.
 1203 Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1204 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1205 simple_return_rtx, indicating which kind of returnjump to create.
1206 It should be NULL otherwise. */
1208 basic_block
1209 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1211 basic_block jump_block, new_bb = NULL, src = e->src;
1212 rtx note;
1213 edge new_edge;
1214 int abnormal_edge_flags = 0;
1215 bool asm_goto_edge = false;
1216 int loc;
1218 /* In the case the last instruction is conditional jump to the next
1219 instruction, first redirect the jump itself and then continue
1220 by creating a basic block afterwards to redirect fallthru edge. */
1221 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1222 && any_condjump_p (BB_END (e->src))
1223 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1225 rtx note;
1226 edge b = unchecked_make_edge (e->src, target, 0);
1227 bool redirected;
1229 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1230 gcc_assert (redirected);
1232 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1233 if (note)
1235 int prob = INTVAL (XEXP (note, 0));
1237 b->probability = prob;
1238 b->count = e->count * prob / REG_BR_PROB_BASE;
1239 e->probability -= e->probability;
1240 e->count -= b->count;
1241 if (e->probability < 0)
1242 e->probability = 0;
1243 if (e->count < 0)
1244 e->count = 0;
1248 if (e->flags & EDGE_ABNORMAL)
1250 /* Irritating special case - fallthru edge to the same block as abnormal
1251 edge.
1252 We can't redirect abnormal edge, but we still can split the fallthru
1253 one and create separate abnormal edge to original destination.
1254 This allows bb-reorder to make such edge non-fallthru. */
1255 gcc_assert (e->dest == target);
1256 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1257 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1259 else
1261 gcc_assert (e->flags & EDGE_FALLTHRU);
1262 if (e->src == ENTRY_BLOCK_PTR)
1264 /* We can't redirect the entry block. Create an empty block
1265 at the start of the function which we use to add the new
1266 jump. */
1267 edge tmp;
1268 edge_iterator ei;
1269 bool found = false;
1271 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1273 /* Change the existing edge's source to be the new block, and add
1274 a new edge from the entry block to the new block. */
1275 e->src = bb;
1276 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1278 if (tmp == e)
1280 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1281 found = true;
1282 break;
1284 else
1285 ei_next (&ei);
1288 gcc_assert (found);
1290 VEC_safe_push (edge, gc, bb->succs, e);
1291 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1295 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1296 don't point to the target or fallthru label. */
1297 if (JUMP_P (BB_END (e->src))
1298 && target != EXIT_BLOCK_PTR
1299 && (e->flags & EDGE_FALLTHRU)
1300 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1302 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1304 for (i = 0; i < n; ++i)
1306 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1307 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1308 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1309 asm_goto_edge = true;
1313 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1315 gcov_type count = e->count;
1316 int probability = e->probability;
1317 /* Create the new structures. */
1319 /* If the old block ended with a tablejump, skip its table
1320 by searching forward from there. Otherwise start searching
1321 forward from the last instruction of the old block. */
1322 if (!tablejump_p (BB_END (e->src), NULL, &note))
1323 note = BB_END (e->src);
1324 note = NEXT_INSN (note);
1326 jump_block = create_basic_block (note, NULL, e->src);
1327 jump_block->count = count;
1328 jump_block->frequency = EDGE_FREQUENCY (e);
1329 jump_block->loop_depth = target->loop_depth;
1331 /* Make sure new block ends up in correct hot/cold section. */
1333 BB_COPY_PARTITION (jump_block, e->src);
1334 if (flag_reorder_blocks_and_partition
1335 && targetm_common.have_named_sections
1336 && JUMP_P (BB_END (jump_block))
1337 && !any_condjump_p (BB_END (jump_block))
1338 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1339 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1341 /* Wire edge in. */
1342 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1343 new_edge->probability = probability;
1344 new_edge->count = count;
1346 /* Redirect old edge. */
1347 redirect_edge_pred (e, jump_block);
1348 e->probability = REG_BR_PROB_BASE;
1350 /* If asm goto has any label refs to target's label,
1351 add also edge from asm goto bb to target. */
1352 if (asm_goto_edge)
1354 new_edge->probability /= 2;
1355 new_edge->count /= 2;
1356 jump_block->count /= 2;
1357 jump_block->frequency /= 2;
1358 new_edge = make_edge (new_edge->src, target,
1359 e->flags & ~EDGE_FALLTHRU);
1360 new_edge->probability = probability - probability / 2;
1361 new_edge->count = count - count / 2;
1364 new_bb = jump_block;
1366 else
1367 jump_block = e->src;
1369 if (e->goto_locus && e->goto_block == NULL)
1370 loc = e->goto_locus;
1371 else
1372 loc = 0;
1373 e->flags &= ~EDGE_FALLTHRU;
1374 if (target == EXIT_BLOCK_PTR)
1376 if (jump_label == ret_rtx)
1378 #ifdef HAVE_return
1379 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1380 #else
1381 gcc_unreachable ();
1382 #endif
1384 else
1386 gcc_assert (jump_label == simple_return_rtx);
1387 #ifdef HAVE_simple_return
1388 emit_jump_insn_after_setloc (gen_simple_return (),
1389 BB_END (jump_block), loc);
1390 #else
1391 gcc_unreachable ();
1392 #endif
1394 set_return_jump_label (BB_END (jump_block));
1396 else
1398 rtx label = block_label (target);
1399 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1400 JUMP_LABEL (BB_END (jump_block)) = label;
1401 LABEL_NUSES (label)++;
1404 emit_barrier_after (BB_END (jump_block));
1405 redirect_edge_succ_nodup (e, target);
1407 if (abnormal_edge_flags)
1408 make_edge (src, target, abnormal_edge_flags);
1410 df_mark_solutions_dirty ();
1411 return new_bb;
1414 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1415 (and possibly create new basic block) to make edge non-fallthru.
1416 Return newly created BB or NULL if none. */
1418 static basic_block
1419 rtl_force_nonfallthru (edge e)
1421 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1424 /* Redirect edge even at the expense of creating new jump insn or
1425 basic block. Return new basic block if created, NULL otherwise.
1426 Conversion must be possible. */
1428 static basic_block
1429 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1431 if (redirect_edge_and_branch (e, target)
1432 || e->dest == target)
1433 return NULL;
1435 /* In case the edge redirection failed, try to force it to be non-fallthru
1436 and redirect newly created simplejump. */
1437 df_set_bb_dirty (e->src);
1438 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1441 /* The given edge should potentially be a fallthru edge. If that is in
1442 fact true, delete the jump and barriers that are in the way. */
1444 static void
1445 rtl_tidy_fallthru_edge (edge e)
1447 rtx q;
1448 basic_block b = e->src, c = b->next_bb;
1450 /* ??? In a late-running flow pass, other folks may have deleted basic
1451 blocks by nopping out blocks, leaving multiple BARRIERs between here
1452 and the target label. They ought to be chastised and fixed.
1454 We can also wind up with a sequence of undeletable labels between
1455 one block and the next.
1457 So search through a sequence of barriers, labels, and notes for
1458 the head of block C and assert that we really do fall through. */
1460 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1461 if (INSN_P (q))
1462 return;
1464 /* Remove what will soon cease being the jump insn from the source block.
1465 If block B consisted only of this single jump, turn it into a deleted
1466 note. */
1467 q = BB_END (b);
1468 if (JUMP_P (q)
1469 && onlyjump_p (q)
1470 && (any_uncondjump_p (q)
1471 || single_succ_p (b)))
1473 #ifdef HAVE_cc0
1474 /* If this was a conditional jump, we need to also delete
1475 the insn that set cc0. */
1476 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1477 q = PREV_INSN (q);
1478 #endif
1480 q = PREV_INSN (q);
1483 /* Selectively unlink the sequence. */
1484 if (q != PREV_INSN (BB_HEAD (c)))
1485 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1487 e->flags |= EDGE_FALLTHRU;
1490 /* Should move basic block BB after basic block AFTER. NIY. */
1492 static bool
1493 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1494 basic_block after ATTRIBUTE_UNUSED)
1496 return false;
1499 /* Split a (typically critical) edge. Return the new block.
1500 The edge must not be abnormal.
1502 ??? The code generally expects to be called on critical edges.
1503 The case of a block ending in an unconditional jump to a
1504 block with multiple predecessors is not handled optimally. */
1506 static basic_block
1507 rtl_split_edge (edge edge_in)
1509 basic_block bb;
1510 rtx before;
1512 /* Abnormal edges cannot be split. */
1513 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1515 /* We are going to place the new block in front of edge destination.
1516 Avoid existence of fallthru predecessors. */
1517 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1519 edge e = find_fallthru_edge (edge_in->dest->preds);
1521 if (e)
1522 force_nonfallthru (e);
1525 /* Create the basic block note. */
1526 if (edge_in->dest != EXIT_BLOCK_PTR)
1527 before = BB_HEAD (edge_in->dest);
1528 else
1529 before = NULL_RTX;
 1531 /* If this is a fall through edge to the exit block, the blocks might not
 1532 be adjacent, and the right place is after the source. */
1533 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1535 before = NEXT_INSN (BB_END (edge_in->src));
1536 bb = create_basic_block (before, NULL, edge_in->src);
1537 BB_COPY_PARTITION (bb, edge_in->src);
1539 else
1541 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1542 /* ??? Why not edge_in->dest->prev_bb here? */
1543 BB_COPY_PARTITION (bb, edge_in->dest);
1546 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1548 /* For non-fallthru edges, we must adjust the predecessor's
1549 jump instruction to target our new block. */
1550 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1552 edge redirected = redirect_edge_and_branch (edge_in, bb);
1553 gcc_assert (redirected);
1555 else
1557 if (edge_in->src != ENTRY_BLOCK_PTR)
1559 /* For asm goto even splitting of fallthru edge might
1560 need insn patching, as other labels might point to the
1561 old label. */
1562 rtx last = BB_END (edge_in->src);
1563 if (last
1564 && JUMP_P (last)
1565 && edge_in->dest != EXIT_BLOCK_PTR
1566 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1567 && patch_jump_insn (last, before, bb))
1568 df_set_bb_dirty (edge_in->src);
1570 redirect_edge_succ (edge_in, bb);
1573 return bb;
1576 /* Queue instructions for insertion on an edge between two basic blocks.
1577 The new instructions and basic blocks (if any) will not appear in the
1578 CFG until commit_edge_insertions is called. */
1580 void
1581 insert_insn_on_edge (rtx pattern, edge e)
1583 /* We cannot insert instructions on an abnormal critical edge.
1584 It will be easier to find the culprit if we die now. */
1585 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1587 if (e->insns.r == NULL_RTX)
1588 start_sequence ();
1589 else
1590 push_to_sequence (e->insns.r);
1592 emit_insn (pattern);
1594 e->insns.r = get_insns ();
1595 end_sequence ();
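/* Illustrative sketch: the usual pattern is to queue patterns on one or more
   edges and then materialize them all at once.  `e', `dest_reg' and `src_reg'
   are assumed to be provided by the caller.

     insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
     commit_edge_insertions ();

   Note that committing may create new basic blocks by splitting edges, as
   described above.  */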
1598 /* Update the CFG for the instructions queued on edge E. */
1600 void
1601 commit_one_edge_insertion (edge e)
1603 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1604 basic_block bb;
1606 /* Pull the insns off the edge now since the edge might go away. */
1607 insns = e->insns.r;
1608 e->insns.r = NULL_RTX;
1610 /* Figure out where to put these insns. If the destination has
1611 one predecessor, insert there. Except for the exit block. */
1612 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1614 bb = e->dest;
1616 /* Get the location correct wrt a code label, and "nice" wrt
1617 a basic block note, and before everything else. */
1618 tmp = BB_HEAD (bb);
1619 if (LABEL_P (tmp))
1620 tmp = NEXT_INSN (tmp);
1621 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1622 tmp = NEXT_INSN (tmp);
1623 if (tmp == BB_HEAD (bb))
1624 before = tmp;
1625 else if (tmp)
1626 after = PREV_INSN (tmp);
1627 else
1628 after = get_last_insn ();
1631 /* If the source has one successor and the edge is not abnormal,
1632 insert there. Except for the entry block. */
1633 else if ((e->flags & EDGE_ABNORMAL) == 0
1634 && single_succ_p (e->src)
1635 && e->src != ENTRY_BLOCK_PTR)
1637 bb = e->src;
1639 /* It is possible to have a non-simple jump here. Consider a target
1640 where some forms of unconditional jumps clobber a register. This
1641 happens on the fr30 for example.
1643 We know this block has a single successor, so we can just emit
1644 the queued insns before the jump. */
1645 if (JUMP_P (BB_END (bb)))
1646 before = BB_END (bb);
1647 else
1649 /* We'd better be fallthru, or we've lost track of what's what. */
1650 gcc_assert (e->flags & EDGE_FALLTHRU);
1652 after = BB_END (bb);
1656 /* Otherwise we must split the edge. */
1657 else
1659 bb = split_edge (e);
1660 after = BB_END (bb);
1662 if (flag_reorder_blocks_and_partition
1663 && targetm_common.have_named_sections
1664 && e->src != ENTRY_BLOCK_PTR
1665 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1666 && !(e->flags & EDGE_CROSSING)
1667 && JUMP_P (after)
1668 && !any_condjump_p (after)
1669 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1670 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1673 /* Now that we've found the spot, do the insertion. */
1674 if (before)
1676 emit_insn_before_noloc (insns, before, bb);
1677 last = prev_nonnote_insn (before);
1679 else
1680 last = emit_insn_after_noloc (insns, after, bb);
1682 if (returnjump_p (last))
1684 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1685 This is not currently a problem because this only happens
1686 for the (single) epilogue, which already has a fallthru edge
1687 to EXIT. */
1689 e = single_succ_edge (bb);
1690 gcc_assert (e->dest == EXIT_BLOCK_PTR
1691 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1693 e->flags &= ~EDGE_FALLTHRU;
1694 emit_barrier_after (last);
1696 if (before)
1697 delete_insn (before);
1699 else
1700 gcc_assert (!JUMP_P (last));
1703 /* Update the CFG for all queued instructions. */
1705 void
1706 commit_edge_insertions (void)
1708 basic_block bb;
1710 #ifdef ENABLE_CHECKING
1711 verify_flow_info ();
1712 #endif
1714 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1716 edge e;
1717 edge_iterator ei;
1719 FOR_EACH_EDGE (e, ei, bb->succs)
1720 if (e->insns.r)
1721 commit_one_edge_insertion (e);
1726 /* Print out RTL-specific basic block information (live information
1727 at start and end). */
1729 static void
1730 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1732 rtx insn;
1733 rtx last;
1734 char *s_indent;
1736 s_indent = (char *) alloca ((size_t) indent + 1);
1737 memset (s_indent, ' ', (size_t) indent);
1738 s_indent[indent] = '\0';
1740 if (df)
1742 df_dump_top (bb, outf);
1743 putc ('\n', outf);
1746 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1747 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1748 insn = NEXT_INSN (insn))
1749 print_rtl_single (outf, insn);
1751 if (df)
1753 df_dump_bottom (bb, outf);
1754 putc ('\n', outf);
1759 /* Like print_rtl, but also print out live information for the start of each
1760 basic block. */
1762 void
1763 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1765 const_rtx tmp_rtx;
1766 if (rtx_first == 0)
1767 fprintf (outf, "(nil)\n");
1768 else
1770 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1771 int max_uid = get_max_uid ();
1772 basic_block *start = XCNEWVEC (basic_block, max_uid);
1773 basic_block *end = XCNEWVEC (basic_block, max_uid);
1774 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1776 basic_block bb;
1778 if (df)
1779 df_dump_start (outf);
1781 FOR_EACH_BB_REVERSE (bb)
1783 rtx x;
1785 start[INSN_UID (BB_HEAD (bb))] = bb;
1786 end[INSN_UID (BB_END (bb))] = bb;
1787 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1789 enum bb_state state = IN_MULTIPLE_BB;
1791 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1792 state = IN_ONE_BB;
1793 in_bb_p[INSN_UID (x)] = state;
1795 if (x == BB_END (bb))
1796 break;
1800 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1802 int did_output;
1804 bb = start[INSN_UID (tmp_rtx)];
1805 if (bb != NULL)
1806 dump_bb_info (bb, true, false, dump_flags, ";; ", outf);
1808 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1809 && !NOTE_P (tmp_rtx)
1810 && !BARRIER_P (tmp_rtx))
1811 fprintf (outf, ";; Insn is not within a basic block\n");
1812 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1813 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1815 did_output = print_rtl_single (outf, tmp_rtx);
1817 bb = end[INSN_UID (tmp_rtx)];
1818 if (bb != NULL)
1819 dump_bb_info (bb, false, true, dump_flags, ";; ", outf);
1820 if (did_output)
1821 putc ('\n', outf);
1824 free (start);
1825 free (end);
1826 free (in_bb_p);
1829 if (crtl->epilogue_delay_list != 0)
1831 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1832 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1833 tmp_rtx = XEXP (tmp_rtx, 1))
1834 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1838 void
1839 update_br_prob_note (basic_block bb)
1841 rtx note;
1842 if (!JUMP_P (BB_END (bb)))
1843 return;
1844 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1845 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1846 return;
1847 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1850 /* Get the last insn associated with block BB (that includes barriers and
1851 tablejumps after BB). */
 1852 rtx
 1853 get_last_bb_insn (basic_block bb)
1855 rtx tmp;
1856 rtx end = BB_END (bb);
1858 /* Include any jump table following the basic block. */
1859 if (tablejump_p (end, NULL, &tmp))
1860 end = tmp;
1862 /* Include any barriers that may follow the basic block. */
1863 tmp = next_nonnote_insn_bb (end);
1864 while (tmp && BARRIER_P (tmp))
1866 end = tmp;
1867 tmp = next_nonnote_insn_bb (end);
1870 return end;
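/* Illustrative sketch: together with delete_insn_chain this gives the idiom
   used by rtl_delete_block earlier in this file to wipe a block including any
   trailing jump table and barriers.  `b' is assumed to be the block being
   deleted.

     rtx first = BB_HEAD (b);
     rtx last = get_last_bb_insn (b);
     delete_insn_chain (first, last, true);  */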
1873 /* Verify the CFG and RTL consistency common for both underlying RTL and
1874 cfglayout RTL.
 1876 Currently it does the following checks:
1878 - overlapping of basic blocks
1879 - insns with wrong BLOCK_FOR_INSN pointers
1880 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1881 - tails of basic blocks (ensure that boundary is necessary)
1882 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1883 and NOTE_INSN_BASIC_BLOCK
1884 - verify that no fall_thru edge crosses hot/cold partition boundaries
1885 - verify that there are no pending RTL branch predictions
 1887 In the future it can be extended to check a lot of other stuff as well
 1888 (reachability of basic blocks, life information, etc.). */
1890 static int
1891 rtl_verify_flow_info_1 (void)
1893 rtx x;
1894 int err = 0;
1895 basic_block bb;
1897 /* Check the general integrity of the basic blocks. */
1898 FOR_EACH_BB_REVERSE (bb)
1900 rtx insn;
1902 if (!(bb->flags & BB_RTL))
1904 error ("BB_RTL flag not set for block %d", bb->index);
1905 err = 1;
1908 FOR_BB_INSNS (bb, insn)
1909 if (BLOCK_FOR_INSN (insn) != bb)
1911 error ("insn %d basic block pointer is %d, should be %d",
1912 INSN_UID (insn),
1913 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1914 bb->index);
1915 err = 1;
1918 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
1919 if (!BARRIER_P (insn)
1920 && BLOCK_FOR_INSN (insn) != NULL)
1922 error ("insn %d in header of bb %d has non-NULL basic block",
1923 INSN_UID (insn), bb->index);
1924 err = 1;
1926 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
1927 if (!BARRIER_P (insn)
1928 && BLOCK_FOR_INSN (insn) != NULL)
1930 error ("insn %d in footer of bb %d has non-NULL basic block",
1931 INSN_UID (insn), bb->index);
1932 err = 1;
1936 /* Now check the basic blocks (boundaries etc.) */
1937 FOR_EACH_BB_REVERSE (bb)
1939 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1940 edge e, fallthru = NULL;
1941 rtx note;
1942 edge_iterator ei;
1944 if (JUMP_P (BB_END (bb))
1945 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1946 && EDGE_COUNT (bb->succs) >= 2
1947 && any_condjump_p (BB_END (bb)))
1949 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1950 && profile_status != PROFILE_ABSENT)
1952 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1953 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1954 err = 1;
1957 FOR_EACH_EDGE (e, ei, bb->succs)
1959 bool is_crossing;
1961 if (e->flags & EDGE_FALLTHRU)
1962 n_fallthru++, fallthru = e;
1964 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1965 && e->src != ENTRY_BLOCK_PTR
1966 && e->dest != EXIT_BLOCK_PTR);
1967 if (e->flags & EDGE_CROSSING)
1969 if (!is_crossing)
1971 error ("EDGE_CROSSING incorrectly set across same section");
1972 err = 1;
1974 if (e->flags & EDGE_FALLTHRU)
1976 error ("fallthru edge crosses section boundary (bb %i)",
1977 e->src->index);
1978 err = 1;
1980 if (e->flags & EDGE_EH)
1982 error ("EH edge crosses section boundary (bb %i)",
1983 e->src->index);
1984 err = 1;
1987 else if (is_crossing)
1989 error ("EDGE_CROSSING missing across section boundary");
1990 err = 1;
1993 if ((e->flags & ~(EDGE_DFS_BACK
1994 | EDGE_CAN_FALLTHRU
1995 | EDGE_IRREDUCIBLE_LOOP
1996 | EDGE_LOOP_EXIT
1997 | EDGE_CROSSING
1998 | EDGE_PRESERVE)) == 0)
1999 n_branch++;
2001 if (e->flags & EDGE_ABNORMAL_CALL)
2002 n_call++;
2004 if (e->flags & EDGE_EH)
2005 n_eh++;
2006 else if (e->flags & EDGE_ABNORMAL)
2007 n_abnormal++;
2010 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2012 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
2013 err = 1;
2015 if (n_eh > 1)
2017 error ("too many eh edges %i", bb->index);
2018 err = 1;
2020 if (n_branch
2021 && (!JUMP_P (BB_END (bb))
2022 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2023 || any_condjump_p (BB_END (bb))))))
2025 error ("too many outgoing branch edges from bb %i", bb->index);
2026 err = 1;
2028 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2030 error ("fallthru edge after unconditional jump %i", bb->index);
2031 err = 1;
2033 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2035 error ("wrong number of branch edges after unconditional jump %i",
2036 bb->index);
2037 err = 1;
2039 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2040 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2042 error ("wrong amount of branch edges after conditional jump %i",
2043 bb->index);
2044 err = 1;
2046 if (n_call && !CALL_P (BB_END (bb)))
2048 error ("call edges for non-call insn in bb %i", bb->index);
2049 err = 1;
2051 if (n_abnormal
2052 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
2053 && (!JUMP_P (BB_END (bb))
2054 || any_condjump_p (BB_END (bb))
2055 || any_uncondjump_p (BB_END (bb))))
2057 error ("abnormal edges for no purpose in bb %i", bb->index);
2058 err = 1;
2061 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2062 /* We may have a barrier inside a basic block before dead code
2063 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2064 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2066 debug_rtx (x);
2067 if (! BLOCK_FOR_INSN (x))
2068 error
2069 ("insn %d inside basic block %d but block_for_insn is NULL",
2070 INSN_UID (x), bb->index);
2071 else
2072 error
2073 ("insn %d inside basic block %d but block_for_insn is %i",
2074 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2076 err = 1;
2079 /* OK, pointers are correct. Now check the header of the basic
2080 block. It ought to contain an optional CODE_LABEL followed
2081 by NOTE_BASIC_BLOCK. */
2082 x = BB_HEAD (bb);
2083 if (LABEL_P (x))
2085 if (BB_END (bb) == x)
2087 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2088 bb->index);
2089 err = 1;
2092 x = NEXT_INSN (x);
2095 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2097 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2098 bb->index);
2099 err = 1;
2102 if (BB_END (bb) == x)
2103 /* Do checks for empty blocks here. */
2105 else
2106 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2108 if (NOTE_INSN_BASIC_BLOCK_P (x))
2110 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2111 INSN_UID (x), bb->index);
2112 err = 1;
2115 if (x == BB_END (bb))
2116 break;
2118 if (control_flow_insn_p (x))
2120 error ("in basic block %d:", bb->index);
2121 fatal_insn ("flow control insn inside a basic block", x);
2126 /* Clean up. */
2127 return err;
2130 /* Verify the CFG and RTL consistency common for both underlying RTL and
2131 cfglayout RTL.
2133 Currently it does the following checks:
2134 - all checks of rtl_verify_flow_info_1
2135 - test head/end pointers
2136 - check that all insns are in the basic blocks
2137 (except the switch handling code, barriers and notes)
2138 - check that all returns are followed by barriers
2139 - check that all fallthru edges point to the adjacent blocks. */
2141 static int
2142 rtl_verify_flow_info (void)
2144 basic_block bb;
2145 int err = rtl_verify_flow_info_1 ();
2146 rtx x;
2147 rtx last_head = get_last_insn ();
2148 basic_block *bb_info;
2149 int num_bb_notes;
2150 const rtx rtx_first = get_insns ();
2151 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2152 const int max_uid = get_max_uid ();
2154 bb_info = XCNEWVEC (basic_block, max_uid);
2156 FOR_EACH_BB_REVERSE (bb)
2158 edge e;
2159 rtx head = BB_HEAD (bb);
2160 rtx end = BB_END (bb);
2162 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2164 /* Verify the end of the basic block is in the INSN chain. */
2165 if (x == end)
2166 break;
2168 /* And that the code outside of basic blocks has NULL bb field. */
2169 if (!BARRIER_P (x)
2170 && BLOCK_FOR_INSN (x) != NULL)
2172 error ("insn %d outside of basic blocks has non-NULL bb field",
2173 INSN_UID (x));
2174 err = 1;
2178 if (!x)
2180 error ("end insn %d for block %d not found in the insn stream",
2181 INSN_UID (end), bb->index);
2182 err = 1;
2185 /* Work backwards from the end to the head of the basic block
2186 to verify the head is in the RTL chain. */
2187 for (; x != NULL_RTX; x = PREV_INSN (x))
2189 /* While walking over the insn chain, verify insns appear
2190 in only one basic block. */
2191 if (bb_info[INSN_UID (x)] != NULL)
2193 error ("insn %d is in multiple basic blocks (%d and %d)",
2194 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2195 err = 1;
2198 bb_info[INSN_UID (x)] = bb;
2200 if (x == head)
2201 break;
2203 if (!x)
2205 error ("head insn %d for block %d not found in the insn stream",
2206 INSN_UID (head), bb->index);
2207 err = 1;
2210 last_head = PREV_INSN (x);
2212 e = find_fallthru_edge (bb->succs);
2213 if (!e)
2215 rtx insn;
2217 /* Ensure existence of barrier in BB with no fallthru edges. */
2218 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2220 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2222 error ("missing barrier after block %i", bb->index);
2223 err = 1;
2224 break;
2226 if (BARRIER_P (insn))
2227 break;
2230 else if (e->src != ENTRY_BLOCK_PTR
2231 && e->dest != EXIT_BLOCK_PTR)
2233 rtx insn;
2235 if (e->src->next_bb != e->dest)
2237 error
2238 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2239 e->src->index, e->dest->index);
2240 err = 1;
2242 else
2243 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2244 insn = NEXT_INSN (insn))
2245 if (BARRIER_P (insn) || INSN_P (insn))
2247 error ("verify_flow_info: Incorrect fallthru %i->%i",
2248 e->src->index, e->dest->index);
2249 fatal_insn ("wrong insn in the fallthru edge", insn);
2250 err = 1;
2255 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2257 /* Check that the code before the first basic block has NULL
2258 bb field. */
2259 if (!BARRIER_P (x)
2260 && BLOCK_FOR_INSN (x) != NULL)
2262 error ("insn %d outside of basic blocks has non-NULL bb field",
2263 INSN_UID (x));
2264 err = 1;
2267 free (bb_info);
2269 num_bb_notes = 0;
2270 last_bb_seen = ENTRY_BLOCK_PTR;
2272 for (x = rtx_first; x; x = NEXT_INSN (x))
2274 if (NOTE_INSN_BASIC_BLOCK_P (x))
2276 bb = NOTE_BASIC_BLOCK (x);
2278 num_bb_notes++;
2279 if (bb != last_bb_seen->next_bb)
2280 internal_error ("basic blocks not laid down consecutively");
2282 curr_bb = last_bb_seen = bb;
2285 if (!curr_bb)
2287 switch (GET_CODE (x))
2289 case BARRIER:
2290 case NOTE:
2291 break;
2293 case CODE_LABEL:
2294 /* An addr_vec is placed outside any basic block. */
2295 if (NEXT_INSN (x)
2296 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2297 x = NEXT_INSN (x);
2299 /* But in any case, non-deletable labels can appear anywhere. */
2300 break;
2302 default:
2303 fatal_insn ("insn outside basic block", x);
2307 if (JUMP_P (x)
2308 && returnjump_p (x) && ! condjump_p (x)
2309 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2310 fatal_insn ("return not followed by barrier", x);
2311 if (curr_bb && x == BB_END (curr_bb))
2312 curr_bb = NULL;
2315 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2316 internal_error
2317 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2318 num_bb_notes, n_basic_blocks);
2320 return err;
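/* A minimal usage sketch (hypothetical code, not part of this file; the
   example_verify_checkpoint name is made up).  Passes normally reach the
   verifier above through the generic cfghooks wrapper, usually only when
   internal checking is enabled.  The #if 0 guard keeps the illustration
   out of the build.  */
#if 0
static void
example_verify_checkpoint (void)
{
#ifdef ENABLE_CHECKING
  /* Dispatches to rtl_verify_flow_info or rtl_verify_flow_info_1,
     depending on which set of cfg hooks is installed.  */
  verify_flow_info ();
#endif
}
#endif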
2323 /* Assume that the preceding pass has possibly eliminated jump instructions
2324 or converted the unconditional jumps. Eliminate the edges from the CFG that
2325 have become dead as a result. Return true if any edges were eliminated. */
2327 bool
2328 purge_dead_edges (basic_block bb)
2330 edge e;
2331 rtx insn = BB_END (bb), note;
2332 bool purged = false;
2333 bool found;
2334 edge_iterator ei;
2336 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2338 insn = PREV_INSN (insn);
2339 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2341 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2342 if (NONJUMP_INSN_P (insn)
2343 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2345 rtx eqnote;
2347 if (! may_trap_p (PATTERN (insn))
2348 || ((eqnote = find_reg_equal_equiv_note (insn))
2349 && ! may_trap_p (XEXP (eqnote, 0))))
2350 remove_note (insn, note);
2353 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2354 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2356 bool remove = false;
2358 /* There are three types of edges we need to handle correctly here: EH
2359 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2360 latter can appear when nonlocal gotos are used. */
2361 if (e->flags & EDGE_ABNORMAL_CALL)
2363 if (!CALL_P (insn))
2364 remove = true;
2365 else if (can_nonlocal_goto (insn))
2367 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2369 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2371 else
2372 remove = true;
2374 else if (e->flags & EDGE_EH)
2375 remove = !can_throw_internal (insn);
2377 if (remove)
2379 remove_edge (e);
2380 df_set_bb_dirty (bb);
2381 purged = true;
2383 else
2384 ei_next (&ei);
2387 if (JUMP_P (insn))
2389 rtx note;
2390 edge b,f;
2391 edge_iterator ei;
2393 /* We care only about conditional jumps and simplejumps. */
2394 if (!any_condjump_p (insn)
2395 && !returnjump_p (insn)
2396 && !simplejump_p (insn))
2397 return purged;
2399 /* Branch probability/prediction notes are defined only for
2400 condjumps. We've possibly turned condjump into simplejump. */
2401 if (simplejump_p (insn))
2403 note = find_reg_note (insn, REG_BR_PROB, NULL);
2404 if (note)
2405 remove_note (insn, note);
2406 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2407 remove_note (insn, note);
2410 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2412 /* Avoid abnormal flags leaking from computed jumps turned
2413 into simplejumps. */
2415 e->flags &= ~EDGE_ABNORMAL;
2417 /* See if this edge is one we should keep. */
2418 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2419 /* A conditional jump can fall through into the next
2420 block, so we should keep the edge. */
2422 ei_next (&ei);
2423 continue;
2425 else if (e->dest != EXIT_BLOCK_PTR
2426 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2427 /* If the destination block is the target of the jump,
2428 keep the edge. */
2430 ei_next (&ei);
2431 continue;
2433 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2434 /* If the destination block is the exit block, and this
2435 instruction is a return, then keep the edge. */
2437 ei_next (&ei);
2438 continue;
2440 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2441 /* Keep the edges that correspond to exceptions thrown by
2442 this instruction and rematerialize the EDGE_ABNORMAL
2443 flag we just cleared above. */
2445 e->flags |= EDGE_ABNORMAL;
2446 ei_next (&ei);
2447 continue;
2450 /* We do not need this edge. */
2451 df_set_bb_dirty (bb);
2452 purged = true;
2453 remove_edge (e);
2456 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2457 return purged;
2459 if (dump_file)
2460 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2462 if (!optimize)
2463 return purged;
2465 /* Redistribute probabilities. */
2466 if (single_succ_p (bb))
2468 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2469 single_succ_edge (bb)->count = bb->count;
2471 else
2473 note = find_reg_note (insn, REG_BR_PROB, NULL);
2474 if (!note)
2475 return purged;
2477 b = BRANCH_EDGE (bb);
2478 f = FALLTHRU_EDGE (bb);
2479 b->probability = INTVAL (XEXP (note, 0));
2480 f->probability = REG_BR_PROB_BASE - b->probability;
2481 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2482 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2485 return purged;
2487 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2489 /* First, there should not be any EH or ABCALL edges resulting
2490 from non-local gotos and the like. If there were, we shouldn't
2491 have created the sibcall in the first place. Second, there
2492 should of course never have been a fallthru edge. */
2493 gcc_assert (single_succ_p (bb));
2494 gcc_assert (single_succ_edge (bb)->flags
2495 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2497 return 0;
2500 /* If we don't see a jump insn, we don't know exactly why the block would
2501 have been broken at this point. Look for a simple, non-fallthru edge,
2502 as these are only created by conditional branches. If we find such an
2503 edge we know that there used to be a jump here and can then safely
2504 remove all non-fallthru edges. */
2505 found = false;
2506 FOR_EACH_EDGE (e, ei, bb->succs)
2507 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2509 found = true;
2510 break;
2513 if (!found)
2514 return purged;
2516 /* Remove all but the fake and fallthru edges. The fake edge may be
2517 the only successor for this block in the case of noreturn
2518 calls. */
2519 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2521 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2523 df_set_bb_dirty (bb);
2524 remove_edge (e);
2525 purged = true;
2527 else
2528 ei_next (&ei);
2531 gcc_assert (single_succ_p (bb));
2533 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2534 single_succ_edge (bb)->count = bb->count;
2536 if (dump_file)
2537 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2538 bb->index);
2539 return purged;
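/* Usage sketch (hypothetical, not part of this file; example_after_jump_removal
   is a made-up helper).  A pass that has just removed or simplified the jump
   ending BB would typically clean up the now stale successor edges like this;
   the function above also redistributes the branch probability and count onto
   the surviving edge(s).  */
#if 0
static void
example_after_jump_removal (basic_block bb)
{
  rtx jump = BB_END (bb);

  /* Assume the pass has proved the conditional jump always falls through
     and deletes it; the branch edge in the CFG is now dead.  */
  if (JUMP_P (jump) && onlyjump_p (jump))
    {
      delete_insn (jump);
      if (purge_dead_edges (bb) && dump_file)
	fprintf (dump_file, "Removed dead edges from bb %i\n", bb->index);
    }
}
#endif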
2542 /* Search all basic blocks for potentially dead edges and purge them. Return
2543 true if some edge has been eliminated. */
2545 bool
2546 purge_all_dead_edges (void)
2548 int purged = false;
2549 basic_block bb;
2551 FOR_EACH_BB (bb)
2553 bool purged_here = purge_dead_edges (bb);
2555 purged |= purged_here;
2558 return purged;
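/* Hypothetical sketch (example_whole_function_cleanup is a made-up helper):
   the whole-function variant is typically run after a pass that may have
   deleted or simplified insns, before the CFG is relied upon again.
   delete_trivially_dead_insns is only an example of such a preceding step,
   not a requirement.  */
#if 0
static void
example_whole_function_cleanup (void)
{
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  if (purge_all_dead_edges () && dump_file)
    fprintf (dump_file, "Some dead edges were purged\n");
}
#endif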
2561 /* This is used by a few passes that emit some instructions after abnormal
2562 calls, moving the basic block's end, while they in fact want to emit
2563 them on the fallthru edge. Look for abnormal call edges, search backward
2564 for the call in the block and insert the instructions on the edge instead.
2566 Similarly, handle instructions throwing exceptions internally.
2568 Return true when instructions have been found and inserted on edges. */
2570 bool
2571 fixup_abnormal_edges (void)
2573 bool inserted = false;
2574 basic_block bb;
2576 FOR_EACH_BB (bb)
2578 edge e;
2579 edge_iterator ei;
2581 /* Look for cases we are interested in - calls or instructions causing
2582 exceptions. */
2583 FOR_EACH_EDGE (e, ei, bb->succs)
2584 if ((e->flags & EDGE_ABNORMAL_CALL)
2585 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2586 == (EDGE_ABNORMAL | EDGE_EH)))
2587 break;
2589 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2591 rtx insn;
2593 /* Get past the new insns generated. Allow notes, as the insns
2594 may be already deleted. */
2595 insn = BB_END (bb);
2596 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2597 && !can_throw_internal (insn)
2598 && insn != BB_HEAD (bb))
2599 insn = PREV_INSN (insn);
2601 if (CALL_P (insn) || can_throw_internal (insn))
2603 rtx stop, next;
2605 e = find_fallthru_edge (bb->succs);
2607 stop = NEXT_INSN (BB_END (bb));
2608 BB_END (bb) = insn;
2610 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2612 next = NEXT_INSN (insn);
2613 if (INSN_P (insn))
2615 delete_insn (insn);
2617 /* Sometimes there's still the return value USE.
2618 If it's placed after a trapping call (i.e. that
2619 call is the last insn anyway), we have no fallthru
2620 edge. Simply delete this use and don't try to insert
2621 on the non-existent edge. */
2622 if (GET_CODE (PATTERN (insn)) != USE)
2624 /* We're not deleting it, we're moving it. */
2625 INSN_DELETED_P (insn) = 0;
2626 PREV_INSN (insn) = NULL_RTX;
2627 NEXT_INSN (insn) = NULL_RTX;
2629 insert_insn_on_edge (insn, e);
2630 inserted = true;
2633 else if (!BARRIER_P (insn))
2634 set_block_for_insn (insn, NULL);
2638 /* It may be that we don't find any trapping insn. In this
2639 case we discovered quite late that the insn that had been
2640 marked as can_throw_internal in fact couldn't trap at all.
2641 So we should in fact delete the EH edges out of the block. */
2642 else
2643 purge_dead_edges (bb);
2647 return inserted;
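/* Hypothetical caller sketch (example_fixup_caller is a made-up name; the
   real call sites live in other files such as the register allocators).
   The insns moved onto edges above are only queued by insert_insn_on_edge,
   so the caller must still commit them.  */
#if 0
static void
example_fixup_caller (void)
{
  if (fixup_abnormal_edges ())
    commit_edge_insertions ();
}
#endif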
2650 /* Same as split_block but update cfg_layout structures. */
2652 static basic_block
2653 cfg_layout_split_block (basic_block bb, void *insnp)
2655 rtx insn = (rtx) insnp;
2656 basic_block new_bb = rtl_split_block (bb, insn);
2658 BB_FOOTER (new_bb) = BB_FOOTER (bb);
2659 BB_FOOTER (bb) = NULL;
2661 return new_bb;
2664 /* Redirect edge E to DEST. */
2665 static edge
2666 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2668 basic_block src = e->src;
2669 edge ret;
2671 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2672 return NULL;
2674 if (e->dest == dest)
2675 return e;
2677 if (e->src != ENTRY_BLOCK_PTR
2678 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2680 df_set_bb_dirty (src);
2681 return ret;
2684 if (e->src == ENTRY_BLOCK_PTR
2685 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2687 if (dump_file)
2688 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2689 e->src->index, dest->index);
2691 df_set_bb_dirty (e->src);
2692 redirect_edge_succ (e, dest);
2693 return e;
2696 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2697 edge in case the basic blocks appear to be in sequence. Avoid this
2698 transformation. */
2700 if (e->flags & EDGE_FALLTHRU)
2702 /* Redirect any branch edges unified with the fallthru one. */
2703 if (JUMP_P (BB_END (src))
2704 && label_is_jump_target_p (BB_HEAD (e->dest),
2705 BB_END (src)))
2707 edge redirected;
2709 if (dump_file)
2710 fprintf (dump_file, "Fallthru edge unified with branch "
2711 "%i->%i redirected to %i\n",
2712 e->src->index, e->dest->index, dest->index);
2713 e->flags &= ~EDGE_FALLTHRU;
2714 redirected = redirect_branch_edge (e, dest);
2715 gcc_assert (redirected);
2716 redirected->flags |= EDGE_FALLTHRU;
2717 df_set_bb_dirty (redirected->src);
2718 return redirected;
2720 /* In case we are redirecting the fallthru edge to the branch edge
2721 of a conditional jump, remove the jump. */
2722 if (EDGE_COUNT (src->succs) == 2)
2724 /* Find the edge that is different from E. */
2725 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2727 if (s->dest == dest
2728 && any_condjump_p (BB_END (src))
2729 && onlyjump_p (BB_END (src)))
2730 delete_insn (BB_END (src));
2732 if (dump_file)
2733 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
2734 e->src->index, e->dest->index, dest->index);
2735 ret = redirect_edge_succ_nodup (e, dest);
2737 else
2738 ret = redirect_branch_edge (e, dest);
2740 /* We don't want simplejumps in the insn stream during cfglayout. */
2741 gcc_assert (!simplejump_p (BB_END (src)));
2743 df_set_bb_dirty (src);
2744 return ret;
2747 /* Simple wrapper as we can always redirect fallthru edges. */
2748 static basic_block
2749 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2751 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2753 gcc_assert (redirected);
2754 return NULL;
2757 /* Same as delete_basic_block but update cfg_layout structures. */
2759 static void
2760 cfg_layout_delete_block (basic_block bb)
2762 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
2764 if (BB_HEADER (bb))
2766 next = BB_HEAD (bb);
2767 if (prev)
2768 NEXT_INSN (prev) = BB_HEADER (bb);
2769 else
2770 set_first_insn (BB_HEADER (bb));
2771 PREV_INSN (BB_HEADER (bb)) = prev;
2772 insn = BB_HEADER (bb);
2773 while (NEXT_INSN (insn))
2774 insn = NEXT_INSN (insn);
2775 NEXT_INSN (insn) = next;
2776 PREV_INSN (next) = insn;
2778 next = NEXT_INSN (BB_END (bb));
2779 if (BB_FOOTER (bb))
2781 insn = BB_FOOTER (bb);
2782 while (insn)
2784 if (BARRIER_P (insn))
2786 if (PREV_INSN (insn))
2787 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2788 else
2789 BB_FOOTER (bb) = NEXT_INSN (insn);
2790 if (NEXT_INSN (insn))
2791 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2793 if (LABEL_P (insn))
2794 break;
2795 insn = NEXT_INSN (insn);
2797 if (BB_FOOTER (bb))
2799 insn = BB_END (bb);
2800 NEXT_INSN (insn) = BB_FOOTER (bb);
2801 PREV_INSN (BB_FOOTER (bb)) = insn;
2802 while (NEXT_INSN (insn))
2803 insn = NEXT_INSN (insn);
2804 NEXT_INSN (insn) = next;
2805 if (next)
2806 PREV_INSN (next) = insn;
2807 else
2808 set_last_insn (insn);
2811 if (bb->next_bb != EXIT_BLOCK_PTR)
2812 to = &BB_HEADER (bb->next_bb);
2813 else
2814 to = &cfg_layout_function_footer;
2816 rtl_delete_block (bb);
2818 if (prev)
2819 prev = NEXT_INSN (prev);
2820 else
2821 prev = get_insns ();
2822 if (next)
2823 next = PREV_INSN (next);
2824 else
2825 next = get_last_insn ();
2827 if (next && NEXT_INSN (next) != prev)
2829 remaints = unlink_insn_chain (prev, next);
2830 insn = remaints;
2831 while (NEXT_INSN (insn))
2832 insn = NEXT_INSN (insn);
2833 NEXT_INSN (insn) = *to;
2834 if (*to)
2835 PREV_INSN (*to) = insn;
2836 *to = remaints;
2840 /* Return true when blocks A and B can be safely merged. */
2842 static bool
2843 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2845 /* If we are partitioning hot/cold basic blocks, we don't want to
2846 mess up unconditional or indirect jumps that cross between hot
2847 and cold sections.
2849 Basic block partitioning may result in some jumps that appear to
2850 be optimizable (or blocks that appear to be mergeable), but which really
2851 must be left untouched (they are required to make it safely across
2852 partition boundaries). See the comments at the top of
2853 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2855 if (BB_PARTITION (a) != BB_PARTITION (b))
2856 return false;
2858 /* Protect the loop latches. */
2859 if (current_loops && b->loop_father->latch == b)
2860 return false;
2862 /* If we would end up moving B's instructions, make sure it doesn't fall
2863 through into the exit block, since we cannot recover from a fallthrough
2864 edge into the exit block occurring in the middle of a function. */
2865 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2867 edge e = find_fallthru_edge (b->succs);
2868 if (e && e->dest == EXIT_BLOCK_PTR)
2869 return false;
2872 /* There must be exactly one edge in between the blocks. */
2873 return (single_succ_p (a)
2874 && single_succ (a) == b
2875 && single_pred_p (b)
2876 && a != b
2877 /* Must be simple edge. */
2878 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2879 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2880 /* If the jump insn has side effects, we can't kill the edge.
2881 When not optimizing, try_redirect_by_replacing_jump will
2882 not allow us to redirect an edge by replacing a table jump. */
2883 && (!JUMP_P (BB_END (a))
2884 || ((!optimize || reload_completed)
2885 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2888 /* Merge blocks A and B. The blocks must be mergeable. */
2890 static void
2891 cfg_layout_merge_blocks (basic_block a, basic_block b)
2893 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
2894 rtx insn;
2896 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
2898 if (dump_file)
2899 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
2900 a->index);
2902 /* If there was a CODE_LABEL beginning B, delete it. */
2903 if (LABEL_P (BB_HEAD (b)))
2905 delete_insn (BB_HEAD (b));
2908 /* We should have a fallthru edge in A, or we can do a dummy redirection to
2909 get it cleaned up. */
2910 if (JUMP_P (BB_END (a)))
2911 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2912 gcc_assert (!JUMP_P (BB_END (a)));
2914 /* When not optimizing CFG and the edge is the only place in RTL which holds
2915 some unique locus, emit a nop with that locus in between. */
2916 if (!optimize)
2917 emit_nop_for_unique_locus_between (a, b);
2919 /* Possible line number notes should appear in between. */
2920 if (BB_HEADER (b))
2922 rtx first = BB_END (a), last;
2924 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
2925 /* The above might add a BARRIER as BB_END, but as barriers
2926 aren't valid parts of a bb, remove_insn doesn't update
2927 BB_END if it is a barrier. So adjust BB_END here. */
2928 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
2929 BB_END (a) = PREV_INSN (BB_END (a));
2930 delete_insn_chain (NEXT_INSN (first), last, false);
2931 BB_HEADER (b) = NULL;
2934 /* In the case basic blocks are not adjacent, move them around. */
2935 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2937 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2939 emit_insn_after_noloc (insn, BB_END (a), a);
2941 /* Otherwise just re-associate the instructions. */
2942 else
2944 insn = BB_HEAD (b);
2945 BB_END (a) = BB_END (b);
2948 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2949 We need to call it explicitly. */
2950 update_bb_for_insn_chain (insn, BB_END (b), a);
2952 /* Skip possible DELETED_LABEL insn. */
2953 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2954 insn = NEXT_INSN (insn);
2955 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2956 BB_HEAD (b) = NULL;
2957 delete_insn (insn);
2959 df_bb_delete (b->index);
2961 /* Possible tablejumps and barriers should appear after the block. */
2962 if (BB_FOOTER (b))
2964 if (!BB_FOOTER (a))
2965 BB_FOOTER (a) = BB_FOOTER (b);
2966 else
2968 rtx last = BB_FOOTER (a);
2970 while (NEXT_INSN (last))
2971 last = NEXT_INSN (last);
2972 NEXT_INSN (last) = BB_FOOTER (b);
2973 PREV_INSN (BB_FOOTER (b)) = last;
2975 BB_FOOTER (b) = NULL;
2978 /* If B was a forwarder block, propagate the locus on the edge. */
2979 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
2980 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2982 if (dump_file)
2983 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
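/* Hypothetical sketch (example_try_merge is a made-up helper): passes do not
   call the two functions above directly; they go through the generic cfghooks
   wrappers, which dispatch here while the cfglayout hooks are installed.  */
#if 0
static void
example_try_merge (basic_block a, basic_block b)
{
  if (can_merge_blocks_p (a, b))
    merge_blocks (a, b);
}
#endif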
2986 /* Split edge E. */
2988 static basic_block
2989 cfg_layout_split_edge (edge e)
2991 basic_block new_bb =
2992 create_basic_block (e->src != ENTRY_BLOCK_PTR
2993 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2994 NULL_RTX, e->src);
2996 if (e->dest == EXIT_BLOCK_PTR)
2997 BB_COPY_PARTITION (new_bb, e->src);
2998 else
2999 BB_COPY_PARTITION (new_bb, e->dest);
3000 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
3001 redirect_edge_and_branch_force (e, new_bb);
3003 return new_bb;
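/* Hypothetical sketch (example_split is a made-up helper): as with merging,
   edge splitting is normally requested through the generic wrapper, which
   lands in the function above when the cfglayout hooks are active.  */
#if 0
static basic_block
example_split (edge e)
{
  basic_block old_dest = e->dest;
  basic_block new_bb = split_edge (e);

  /* The new block falls through to the original destination of E.  */
  gcc_assert (single_succ (new_bb) == old_dest);
  return new_bb;
}
#endif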
3006 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
3008 static void
3009 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
3013 /* Return 1 if BB ends with a call, possibly followed by some
3014 instructions that must stay with the call, 0 otherwise. */
3016 static bool
3017 rtl_block_ends_with_call_p (basic_block bb)
3019 rtx insn = BB_END (bb);
3021 while (!CALL_P (insn)
3022 && insn != BB_HEAD (bb)
3023 && (keep_with_call_p (insn)
3024 || NOTE_P (insn)
3025 || DEBUG_INSN_P (insn)))
3026 insn = PREV_INSN (insn);
3027 return (CALL_P (insn));
3030 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
3032 static bool
3033 rtl_block_ends_with_condjump_p (const_basic_block bb)
3035 return any_condjump_p (BB_END (bb));
3038 /* Return true if we need to add fake edge to exit.
3039 Helper function for rtl_flow_call_edges_add. */
3041 static bool
3042 need_fake_edge_p (const_rtx insn)
3044 if (!INSN_P (insn))
3045 return false;
3047 if ((CALL_P (insn)
3048 && !SIBLING_CALL_P (insn)
3049 && !find_reg_note (insn, REG_NORETURN, NULL)
3050 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
3051 return true;
3053 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
3054 && MEM_VOLATILE_P (PATTERN (insn)))
3055 || (GET_CODE (PATTERN (insn)) == PARALLEL
3056 && asm_noperands (insn) != -1
3057 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
3058 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3061 /* Add fake edges to the function exit for any non-constant and non-noreturn
3062 calls, or volatile inline assembly, in the bitmap of blocks specified by
3063 BLOCKS, or to the whole CFG if BLOCKS is zero. Return the number of blocks
3064 that were split.
3066 The goal is to expose cases in which entering a basic block does not imply
3067 that all subsequent instructions must be executed. */
3069 static int
3070 rtl_flow_call_edges_add (sbitmap blocks)
3072 int i;
3073 int blocks_split = 0;
3074 int last_bb = last_basic_block;
3075 bool check_last_block = false;
3077 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3078 return 0;
3080 if (! blocks)
3081 check_last_block = true;
3082 else
3083 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
3085 /* In the last basic block, before epilogue generation, there will be
3086 a fallthru edge to EXIT. Special care is required if the last insn
3087 of the last basic block is a call because make_edge folds duplicate
3088 edges, which would result in the fallthru edge also being marked
3089 fake, which would result in the fallthru edge being removed by
3090 remove_fake_edges, which would result in an invalid CFG.
3092 Moreover, we can't elide the outgoing fake edge, since the block
3093 profiler needs to take this into account in order to solve the minimal
3094 spanning tree in the case that the call doesn't return.
3096 Handle this by adding a dummy instruction in a new last basic block. */
3097 if (check_last_block)
3099 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
3100 rtx insn = BB_END (bb);
3102 /* Back up past insns that must be kept in the same block as a call. */
3103 while (insn != BB_HEAD (bb)
3104 && keep_with_call_p (insn))
3105 insn = PREV_INSN (insn);
3107 if (need_fake_edge_p (insn))
3109 edge e;
3111 e = find_edge (bb, EXIT_BLOCK_PTR);
3112 if (e)
3114 insert_insn_on_edge (gen_use (const0_rtx), e);
3115 commit_edge_insertions ();
3120 /* Now add fake edges to the function exit for any non-constant
3121 calls, since there is no way that we can determine if they will
3122 return or not... */
3124 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
3126 basic_block bb = BASIC_BLOCK (i);
3127 rtx insn;
3128 rtx prev_insn;
3130 if (!bb)
3131 continue;
3133 if (blocks && !TEST_BIT (blocks, i))
3134 continue;
3136 for (insn = BB_END (bb); ; insn = prev_insn)
3138 prev_insn = PREV_INSN (insn);
3139 if (need_fake_edge_p (insn))
3141 edge e;
3142 rtx split_at_insn = insn;
3144 /* Don't split the block between a call and an insn that should
3145 remain in the same block as the call. */
3146 if (CALL_P (insn))
3147 while (split_at_insn != BB_END (bb)
3148 && keep_with_call_p (NEXT_INSN (split_at_insn)))
3149 split_at_insn = NEXT_INSN (split_at_insn);
3151 /* The handling above of the final block before the epilogue
3152 should be enough to verify that there is no edge to the exit
3153 block in CFG already. Calling make_edge in such case would
3154 cause us to mark that edge as fake and remove it later. */
3156 #ifdef ENABLE_CHECKING
3157 if (split_at_insn == BB_END (bb))
3159 e = find_edge (bb, EXIT_BLOCK_PTR);
3160 gcc_assert (e == NULL);
3162 #endif
3164 /* Note that the following may create a new basic block
3165 and renumber the existing basic blocks. */
3166 if (split_at_insn != BB_END (bb))
3168 e = split_block (bb, split_at_insn);
3169 if (e)
3170 blocks_split++;
3173 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
3176 if (insn == BB_HEAD (bb))
3177 break;
3181 if (blocks_split)
3182 verify_flow_info ();
3184 return blocks_split;
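/* Hypothetical sketch (example_add_fake_call_edges is a made-up helper):
   profiling-style passes invoke this through the cfghooks wrapper; a NULL
   bitmap means the whole CFG is processed.  */
#if 0
static void
example_add_fake_call_edges (void)
{
  int n_split = flow_call_edges_add (NULL);
  if (n_split && dump_file)
    fprintf (dump_file, "%d block(s) split for fake call edges\n", n_split);
}
#endif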
3187 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
3188 the conditional branch target, SECOND_HEAD should be the fall-thru;
3189 there is no need to handle the fall-thru here, the loop versioning code
3190 handles it. The reason for SECOND_HEAD is that it is needed for the
3191 condition in trees, and this should be of the same type since it is a hook. */
3192 static void
3193 rtl_lv_add_condition_to_bb (basic_block first_head ,
3194 basic_block second_head ATTRIBUTE_UNUSED,
3195 basic_block cond_bb, void *comp_rtx)
3197 rtx label, seq, jump;
3198 rtx op0 = XEXP ((rtx)comp_rtx, 0);
3199 rtx op1 = XEXP ((rtx)comp_rtx, 1);
3200 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3201 enum machine_mode mode;
3204 label = block_label (first_head);
3205 mode = GET_MODE (op0);
3206 if (mode == VOIDmode)
3207 mode = GET_MODE (op1);
3209 start_sequence ();
3210 op0 = force_operand (op0, NULL_RTX);
3211 op1 = force_operand (op1, NULL_RTX);
3212 do_compare_rtx_and_jump (op0, op1, comp, 0,
3213 mode, NULL_RTX, NULL_RTX, label, -1);
3214 jump = get_last_insn ();
3215 JUMP_LABEL (jump) = label;
3216 LABEL_NUSES (label)++;
3217 seq = get_insns ();
3218 end_sequence ();
3220 /* Add the new condition in the new head. */
3221 emit_insn_after (seq, BB_END (cond_bb));
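/* Hypothetical sketch (example_version_condition, REG and the "reg != 0"
   condition are all made up for illustration): loop versioning builds a
   comparison RTX and passes it through the generic hook, which resolves to
   the function above in cfglayout RTL mode.  */
#if 0
static void
example_version_condition (basic_block cond_bb, basic_block then_bb,
			   basic_block else_bb, rtx reg)
{
  /* Version on "reg != 0"; THEN_BB is the branch target, ELSE_BB the
     fall-thru.  */
  rtx comp = gen_rtx_NE (VOIDmode, reg, const0_rtx);
  lv_add_condition_to_bb (then_bb, else_bb, cond_bb, comp);
}
#endif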
3225 /* Given a block B with a conditional branch at its end, store the
3226 branch edge and the fall-thru edge in *BRANCH_EDGE and *FALLTHRU_EDGE
3227 respectively. */
3228 static void
3229 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3230 edge *fallthru_edge)
3232 edge e = EDGE_SUCC (b, 0);
3234 if (e->flags & EDGE_FALLTHRU)
3236 *fallthru_edge = e;
3237 *branch_edge = EDGE_SUCC (b, 1);
3239 else
3241 *branch_edge = e;
3242 *fallthru_edge = EDGE_SUCC (b, 1);
3246 void
3247 init_rtl_bb_info (basic_block bb)
3249 gcc_assert (!bb->il.x.rtl);
3250 bb->il.x.head_ = NULL;
3251 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
3254 /* Returns true if it is possible to remove edge E by redirecting
3255 it to the destination of the other edge from E->src. */
3257 static bool
3258 rtl_can_remove_branch_p (const_edge e)
3260 const_basic_block src = e->src;
3261 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3262 const_rtx insn = BB_END (src), set;
3264 /* The conditions are taken from try_redirect_by_replacing_jump. */
3265 if (target == EXIT_BLOCK_PTR)
3266 return false;
3268 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3269 return false;
3271 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3272 || BB_PARTITION (src) != BB_PARTITION (target))
3273 return false;
3275 if (!onlyjump_p (insn)
3276 || tablejump_p (insn, NULL, NULL))
3277 return false;
3279 set = single_set (insn);
3280 if (!set || side_effects_p (set))
3281 return false;
3283 return true;
3286 /* We do not want to declare these functions in a header file, since they
3287 should only be used through the cfghooks interface, and we do not want to
3288 move them here since it would require also moving quite a lot of related
3289 code. They are in cfglayout.c. */
3290 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3291 extern basic_block cfg_layout_duplicate_bb (basic_block);
3293 static basic_block
3294 rtl_duplicate_bb (basic_block bb)
3296 bb = cfg_layout_duplicate_bb (bb);
3297 bb->aux = NULL;
3298 return bb;
3301 /* Implementation of CFG manipulation for linearized RTL. */
3302 struct cfg_hooks rtl_cfg_hooks = {
3303 "rtl",
3304 rtl_verify_flow_info,
3305 rtl_dump_bb,
3306 rtl_create_basic_block,
3307 rtl_redirect_edge_and_branch,
3308 rtl_redirect_edge_and_branch_force,
3309 rtl_can_remove_branch_p,
3310 rtl_delete_block,
3311 rtl_split_block,
3312 rtl_move_block_after,
3313 rtl_can_merge_blocks, /* can_merge_blocks_p */
3314 rtl_merge_blocks,
3315 rtl_predict_edge,
3316 rtl_predicted_by_p,
3317 cfg_layout_can_duplicate_bb_p,
3318 rtl_duplicate_bb,
3319 rtl_split_edge,
3320 rtl_make_forwarder_block,
3321 rtl_tidy_fallthru_edge,
3322 rtl_force_nonfallthru,
3323 rtl_block_ends_with_call_p,
3324 rtl_block_ends_with_condjump_p,
3325 rtl_flow_call_edges_add,
3326 NULL, /* execute_on_growing_pred */
3327 NULL, /* execute_on_shrinking_pred */
3328 NULL, /* duplicate loop for trees */
3329 NULL, /* lv_add_condition_to_bb */
3330 NULL, /* lv_adjust_loop_header_phi*/
3331 NULL, /* extract_cond_bb_edges */
3332 NULL /* flush_pending_stmts */
3335 /* Implementation of CFG manipulation for cfg layout RTL, where
3336 basic blocks connected via fallthru edges do not have to be adjacent.
3337 This representation will hopefully become the default one in a future
3338 version of the compiler. */
3340 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3341 "cfglayout mode",
3342 rtl_verify_flow_info_1,
3343 rtl_dump_bb,
3344 cfg_layout_create_basic_block,
3345 cfg_layout_redirect_edge_and_branch,
3346 cfg_layout_redirect_edge_and_branch_force,
3347 rtl_can_remove_branch_p,
3348 cfg_layout_delete_block,
3349 cfg_layout_split_block,
3350 rtl_move_block_after,
3351 cfg_layout_can_merge_blocks_p,
3352 cfg_layout_merge_blocks,
3353 rtl_predict_edge,
3354 rtl_predicted_by_p,
3355 cfg_layout_can_duplicate_bb_p,
3356 cfg_layout_duplicate_bb,
3357 cfg_layout_split_edge,
3358 rtl_make_forwarder_block,
3359 NULL, /* tidy_fallthru_edge */
3360 rtl_force_nonfallthru,
3361 rtl_block_ends_with_call_p,
3362 rtl_block_ends_with_condjump_p,
3363 rtl_flow_call_edges_add,
3364 NULL, /* execute_on_growing_pred */
3365 NULL, /* execute_on_shrinking_pred */
3366 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3367 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3368 NULL, /* lv_adjust_loop_header_phi*/
3369 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3370 NULL /* flush_pending_stmts */