1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low level functions to manipulate and analyze the CFG
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
36 Functions not intended for generic use:
37 - Infrastructure to determine quickly basic block for insn
38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
39 - Edge redirection with updating and optimizing of insn chain
40 block_label, tidy_fallthru_edge, force_nonfallthru */
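/* Editorial usage sketch (not part of the original file): a minimal example of
   how a pass typically drives the edge-insertion API listed above, assuming the
   usual GCC internal environment (rtl.h, basic-block.h).  The predicate
   want_insn_on_edge_p and the pattern PAT are hypothetical placeholders, not
   functions defined by GCC.

     basic_block bb;
     edge e;
     edge_iterator ei;

     FOR_EACH_BB (bb)
       FOR_EACH_EDGE (e, ei, bb->succs)
         if (want_insn_on_edge_p (e))      // hypothetical predicate
           insert_insn_on_edge (pat, e);   // queue PAT on edge E

     // Materialize everything queued above; this may split critical edges
     // and create new basic blocks.
     commit_edge_insertions ();
*/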
42 #include "config.h"
43 #include "system.h"
44 #include "coretypes.h"
45 #include "tm.h"
46 #include "tree.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "output.h"
52 #include "function.h"
53 #include "except.h"
54 #include "rtl-error.h"
55 #include "tm_p.h"
56 #include "obstack.h"
57 #include "insn-attr.h"
58 #include "insn-config.h"
59 #include "cfglayout.h"
60 #include "expr.h"
61 #include "target.h"
62 #include "common/common-target.h"
63 #include "cfgloop.h"
64 #include "ggc.h"
65 #include "tree-pass.h"
66 #include "df.h"
68 static int can_delete_note_p (const_rtx);
69 static int can_delete_label_p (const_rtx);
70 static basic_block rtl_split_edge (edge);
71 static bool rtl_move_block_after (basic_block, basic_block);
72 static int rtl_verify_flow_info (void);
73 static basic_block cfg_layout_split_block (basic_block, void *);
74 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
75 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
76 static void cfg_layout_delete_block (basic_block);
77 static void rtl_delete_block (basic_block);
78 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
79 static edge rtl_redirect_edge_and_branch (edge, basic_block);
80 static basic_block rtl_split_block (basic_block, void *);
81 static void rtl_dump_bb (basic_block, FILE *, int, int);
82 static int rtl_verify_flow_info_1 (void);
83 static void rtl_make_forwarder_block (edge);
85 /* Return true if NOTE is not one of the ones that must be kept paired,
86 so that we may simply delete it. */
88 static int
89 can_delete_note_p (const_rtx note)
91 switch (NOTE_KIND (note))
93 case NOTE_INSN_DELETED:
94 case NOTE_INSN_BASIC_BLOCK:
95 case NOTE_INSN_EPILOGUE_BEG:
96 return true;
98 default:
99 return false;
103 /* True if a given label can be deleted. */
105 static int
106 can_delete_label_p (const_rtx label)
108 return (!LABEL_PRESERVE_P (label)
109 /* User declared labels must be preserved. */
110 && LABEL_NAME (label) == 0
111 && !in_expr_list_p (forced_labels, label));
114 /* Delete INSN by patching it out. Return the next insn. */
116 rtx
117 delete_insn (rtx insn)
119 rtx next = NEXT_INSN (insn);
120 rtx note;
121 bool really_delete = true;
123 if (LABEL_P (insn))
125 /* Some labels can't be directly removed from the INSN chain, as they
126 might be referenced via variables, the constant pool, etc.
127 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
128 if (! can_delete_label_p (insn))
130 const char *name = LABEL_NAME (insn);
132 really_delete = false;
133 PUT_CODE (insn, NOTE);
134 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
135 NOTE_DELETED_LABEL_NAME (insn) = name;
138 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
141 if (really_delete)
143 /* If this insn has already been deleted, something is very wrong. */
144 gcc_assert (!INSN_DELETED_P (insn));
145 remove_insn (insn);
146 INSN_DELETED_P (insn) = 1;
149 /* If deleting a jump, decrement the use count of the label. Deleting
150 the label itself should happen in the normal course of block merging. */
151 if (JUMP_P (insn))
153 if (JUMP_LABEL (insn)
154 && LABEL_P (JUMP_LABEL (insn)))
155 LABEL_NUSES (JUMP_LABEL (insn))--;
157 /* If there are more targets, remove them too. */
158 while ((note
159 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
160 && LABEL_P (XEXP (note, 0)))
162 LABEL_NUSES (XEXP (note, 0))--;
163 remove_note (insn, note);
167 /* Also if deleting any insn that references a label as an operand. */
168 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
169 && LABEL_P (XEXP (note, 0)))
171 LABEL_NUSES (XEXP (note, 0))--;
172 remove_note (insn, note);
175 if (JUMP_TABLE_DATA_P (insn))
177 rtx pat = PATTERN (insn);
178 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
179 int len = XVECLEN (pat, diff_vec_p);
180 int i;
182 for (i = 0; i < len; i++)
184 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
186 /* When deleting code in bulk (e.g. removing many unreachable
187 blocks) we can delete a label that's a target of the vector
188 before deleting the vector itself. */
189 if (!NOTE_P (label))
190 LABEL_NUSES (label)--;
194 return next;
197 /* Like delete_insn but also purge dead edges from BB. */
199 rtx
200 delete_insn_and_edges (rtx insn)
202 rtx x;
203 bool purge = false;
205 if (INSN_P (insn)
206 && BLOCK_FOR_INSN (insn)
207 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
208 purge = true;
209 x = delete_insn (insn);
210 if (purge)
211 purge_dead_edges (BLOCK_FOR_INSN (insn));
212 return x;
215 /* Unlink a chain of insns between START and FINISH, leaving notes
216 that must be paired. If CLEAR_BB is true, we set bb field for
217 insns that cannot be removed to NULL. */
219 void
220 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
222 rtx next;
224 /* Unchain the insns one by one. It would be quicker to delete all of these
225 with a single unchaining, rather than one at a time, but we need to keep
226 the NOTEs. */
227 while (1)
229 next = NEXT_INSN (start);
230 if (NOTE_P (start) && !can_delete_note_p (start))
232 else
233 next = delete_insn (start);
235 if (clear_bb && !INSN_DELETED_P (start))
236 set_block_for_insn (start, NULL);
238 if (start == finish)
239 break;
240 start = next;
244 /* Create a new basic block consisting of the instructions between HEAD and END
245 inclusive. This function is designed to allow fast BB construction - it reuses
246 the note and basic block struct in BB_NOTE, if any, does not grow the
247 BASIC_BLOCK chain, and should be used directly only by CFG construction code.
248 END can be NULL to create a new empty basic block before HEAD. Both END
249 and HEAD can be NULL to create a basic block at the end of the INSN chain.
250 AFTER is the basic block the new block should be put after. */
252 basic_block
253 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
255 basic_block bb;
257 if (bb_note
258 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 && bb->aux == NULL)
261 /* If we found an existing note, thread it back onto the chain. */
263 rtx after;
265 if (LABEL_P (head))
266 after = head;
267 else
269 after = PREV_INSN (head);
270 head = bb_note;
273 if (after != bb_note && NEXT_INSN (after) != bb_note)
274 reorder_insns_nobb (bb_note, bb_note, after);
276 else
278 /* Otherwise we must create a note and a basic block structure. */
280 bb = alloc_block ();
282 init_rtl_bb_info (bb);
283 if (!head && !end)
284 head = end = bb_note
285 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
286 else if (LABEL_P (head) && end)
288 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
289 if (head == end)
290 end = bb_note;
292 else
294 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
295 head = bb_note;
296 if (!end)
297 end = head;
300 NOTE_BASIC_BLOCK (bb_note) = bb;
303 /* Always include the bb note in the block. */
304 if (NEXT_INSN (end) == bb_note)
305 end = bb_note;
307 BB_HEAD (bb) = head;
308 BB_END (bb) = end;
309 bb->index = last_basic_block++;
310 bb->flags = BB_NEW | BB_RTL;
311 link_block (bb, after);
312 SET_BASIC_BLOCK (bb->index, bb);
313 df_bb_refs_record (bb->index, false);
314 update_bb_for_insn (bb);
315 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
317 /* Tag the block so that we know it has been used when considering
318 other basic block notes. */
319 bb->aux = bb;
321 return bb;
324 /* Create a new basic block consisting of the instructions between HEAD and END
325 and place it in the BB chain after block AFTER. END can be NULL to
326 create a new empty basic block before HEAD. Both END and HEAD can be
327 NULL to create a basic block at the end of the INSN chain. */
329 static basic_block
330 rtl_create_basic_block (void *headp, void *endp, basic_block after)
332 rtx head = (rtx) headp, end = (rtx) endp;
333 basic_block bb;
335 /* Grow the basic block array if needed. */
336 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
338 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
339 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
342 n_basic_blocks++;
344 bb = create_basic_block_structure (head, end, NULL, after);
345 bb->aux = NULL;
346 return bb;
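/* Editorial sketch: the HEAD/END conventions documented above, exercised
   through the generic create_basic_block hook (cfghooks.h), which dispatches
   to rtl_create_basic_block when the RTL hooks are installed.  OLD_BB and
   INSN are assumed locals of a hypothetical caller.

     basic_block bb;

     // Carve INSN .. BB_END (old_bb) out into a new block placed after OLD_BB,
     // as rtl_split_block does below.
     bb = create_basic_block (insn, BB_END (old_bb), old_bb);

     // HEAD == END == NULL: append an empty block (with a fresh
     // NOTE_INSN_BASIC_BLOCK) at the end of the insn chain.
     bb = create_basic_block (NULL, NULL, EXIT_BLOCK_PTR->prev_bb);
*/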
349 static basic_block
350 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
352 basic_block newbb = rtl_create_basic_block (head, end, after);
354 return newbb;
357 /* Delete the insns in a (non-live) block. We physically delete every
358 non-deleted-note insn, and update the flow graph appropriately. */
362 /* ??? Preserving all such notes strikes me as wrong. It would be nice
363 to post-process the stream to remove empty blocks, loops, ranges, etc. */
365 static void
366 rtl_delete_block (basic_block b)
368 rtx insn, end;
370 /* If the head of this block is a CODE_LABEL, then it might be the
371 label for an exception handler which can't be reached. We need
372 to remove the label from the exception_handler_label list. */
373 insn = BB_HEAD (b);
375 end = get_last_bb_insn (b);
377 /* Selectively delete the entire chain. */
378 BB_HEAD (b) = NULL;
379 delete_insn_chain (insn, end, true);
382 if (dump_file)
383 fprintf (dump_file, "deleting block %d\n", b->index);
384 df_bb_delete (b->index);
387 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
389 void
390 compute_bb_for_insn (void)
392 basic_block bb;
394 FOR_EACH_BB (bb)
396 rtx end = BB_END (bb);
397 rtx insn;
399 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
401 BLOCK_FOR_INSN (insn) = bb;
402 if (insn == end)
403 break;
408 /* Release the basic_block_for_insn array. */
410 unsigned int
411 free_bb_for_insn (void)
413 rtx insn;
414 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
415 if (!BARRIER_P (insn))
416 BLOCK_FOR_INSN (insn) = NULL;
417 return 0;
420 static unsigned int
421 rest_of_pass_free_cfg (void)
423 #ifdef DELAY_SLOTS
424 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
425 valid at that point so it would be too late to call df_analyze. */
426 if (optimize > 0 && flag_delayed_branch)
428 df_note_add_problem ();
429 df_analyze ();
431 #endif
433 free_bb_for_insn ();
434 return 0;
437 struct rtl_opt_pass pass_free_cfg =
438 {
439  {
440 RTL_PASS,
441 "*free_cfg", /* name */
442 NULL, /* gate */
443 rest_of_pass_free_cfg, /* execute */
444 NULL, /* sub */
445 NULL, /* next */
446 0, /* static_pass_number */
447 TV_NONE, /* tv_id */
448 0, /* properties_required */
449 0, /* properties_provided */
450 PROP_cfg, /* properties_destroyed */
451 0, /* todo_flags_start */
452 0, /* todo_flags_finish */
453  }
454 };
456 /* Return the RTX after which to emit code at the entry of the function. */
457 rtx
458 entry_of_function (void)
460 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
461 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
464 /* Emit INSN at the entry point of the function, ensuring that it is only
465 executed once per function. */
466 void
467 emit_insn_at_entry (rtx insn)
469 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
470 edge e = ei_safe_edge (ei);
471 gcc_assert (e->flags & EDGE_FALLTHRU);
473 insert_insn_on_edge (insn, e);
474 commit_edge_insertions ();
477 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
478 (or BARRIER if found) and notify df of the bb change.
479 The insn chain range is inclusive
480 (i.e. both BEGIN and END will be updated). */
482 static void
483 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
485 rtx insn;
487 end = NEXT_INSN (end);
488 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
489 if (!BARRIER_P (insn))
490 df_insn_change_bb (insn, bb);
493 /* Update BLOCK_FOR_INSN of insns in BB to BB,
494 and notify df of the change. */
496 void
497 update_bb_for_insn (basic_block bb)
499 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
503 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
504 note associated with the BLOCK. */
506 static rtx
507 first_insn_after_basic_block_note (basic_block block)
509 rtx insn;
511 /* Get the first instruction in the block. */
512 insn = BB_HEAD (block);
514 if (insn == NULL_RTX)
515 return NULL_RTX;
516 if (LABEL_P (insn))
517 insn = NEXT_INSN (insn);
518 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
520 return NEXT_INSN (insn);
523 /* Creates a new basic block just after basic block B by splitting
524 everything after specified instruction I. */
526 static basic_block
527 rtl_split_block (basic_block bb, void *insnp)
529 basic_block new_bb;
530 rtx insn = (rtx) insnp;
531 edge e;
532 edge_iterator ei;
534 if (!insn)
536 insn = first_insn_after_basic_block_note (bb);
538 if (insn)
540 rtx next = insn;
542 insn = PREV_INSN (insn);
544 /* If the block contains only debug insns, insn would have
545 been NULL in a non-debug compilation, and then we'd end
546 up emitting a DELETED note. For -fcompare-debug
547 stability, emit the note too. */
548 if (insn != BB_END (bb)
549 && DEBUG_INSN_P (next)
550 && DEBUG_INSN_P (BB_END (bb)))
552 while (next != BB_END (bb) && DEBUG_INSN_P (next))
553 next = NEXT_INSN (next);
555 if (next == BB_END (bb))
556 emit_note_after (NOTE_INSN_DELETED, next);
559 else
560 insn = get_last_insn ();
563 /* We probably should check type of the insn so that we do not create
564 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
565 bother. */
566 if (insn == BB_END (bb))
567 emit_note_after (NOTE_INSN_DELETED, insn);
569 /* Create the new basic block. */
570 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
571 BB_COPY_PARTITION (new_bb, bb);
572 BB_END (bb) = insn;
574 /* Redirect the outgoing edges. */
575 new_bb->succs = bb->succs;
576 bb->succs = NULL;
577 FOR_EACH_EDGE (e, ei, new_bb->succs)
578 e->src = new_bb;
580 /* The new block starts off being dirty. */
581 df_set_bb_dirty (bb);
582 return new_bb;
585 /* Blocks A and B are to be merged into a single block A. The insns
586 are already contiguous. */
588 static void
589 rtl_merge_blocks (basic_block a, basic_block b)
591 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
592 rtx del_first = NULL_RTX, del_last = NULL_RTX;
593 rtx b_debug_start = b_end, b_debug_end = b_end;
594 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
595 int b_empty = 0;
597 if (dump_file)
598 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
599 a->index);
601 while (DEBUG_INSN_P (b_end))
602 b_end = PREV_INSN (b_debug_start = b_end);
604 /* If there was a CODE_LABEL beginning B, delete it. */
605 if (LABEL_P (b_head))
607 /* Detect basic blocks with nothing but a label. This can happen
608 in particular at the end of a function. */
609 if (b_head == b_end)
610 b_empty = 1;
612 del_first = del_last = b_head;
613 b_head = NEXT_INSN (b_head);
616 /* Delete the basic block note and handle blocks containing just that
617 note. */
618 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
620 if (b_head == b_end)
621 b_empty = 1;
622 if (! del_last)
623 del_first = b_head;
625 del_last = b_head;
626 b_head = NEXT_INSN (b_head);
629 /* If there was a jump out of A, delete it. */
630 if (JUMP_P (a_end))
632 rtx prev;
634 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
635 if (!NOTE_P (prev)
636 || NOTE_INSN_BASIC_BLOCK_P (prev)
637 || prev == BB_HEAD (a))
638 break;
640 del_first = a_end;
642 #ifdef HAVE_cc0
643 /* If this was a conditional jump, we need to also delete
644 the insn that set cc0. */
645 if (only_sets_cc0_p (prev))
647 rtx tmp = prev;
649 prev = prev_nonnote_insn (prev);
650 if (!prev)
651 prev = BB_HEAD (a);
652 del_first = tmp;
654 #endif
656 a_end = PREV_INSN (del_first);
658 else if (BARRIER_P (NEXT_INSN (a_end)))
659 del_first = NEXT_INSN (a_end);
661 /* Delete everything marked above as well as crap that might be
662 hanging out between the two blocks. */
663 BB_HEAD (b) = NULL;
664 delete_insn_chain (del_first, del_last, true);
666 /* Reassociate the insns of B with A. */
667 if (!b_empty)
669 update_bb_for_insn_chain (a_end, b_debug_end, a);
671 a_end = b_debug_end;
673 else if (b_end != b_debug_end)
675 /* Move any deleted labels and other notes between the end of A
676 and the debug insns that make up B after the debug insns,
677 bringing the debug insns into A while keeping the notes after
678 the end of A. */
679 if (NEXT_INSN (a_end) != b_debug_start)
680 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
681 b_debug_end);
682 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
683 a_end = b_debug_end;
686 df_bb_delete (b->index);
687 BB_END (a) = a_end;
689 /* If B was a forwarder block, propagate the locus on the edge. */
690 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
691 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
693 if (dump_file)
694 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
698 /* Return true when blocks A and B can be merged. */
700 static bool
701 rtl_can_merge_blocks (basic_block a, basic_block b)
703 /* If we are partitioning hot/cold basic blocks, we don't want to
704 mess up unconditional or indirect jumps that cross between hot
705 and cold sections.
707 Basic block partitioning may result in some jumps that appear to
708 be optimizable (or blocks that appear to be mergeable), but which really
709 must be left untouched (they are required to make it safely across
710 partition boundaries). See the comments at the top of
711 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
713 if (BB_PARTITION (a) != BB_PARTITION (b))
714 return false;
716 /* There must be exactly one edge in between the blocks. */
717 return (single_succ_p (a)
718 && single_succ (a) == b
719 && single_pred_p (b)
720 && a != b
721 /* Must be simple edge. */
722 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
723 && a->next_bb == b
724 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
725 /* If the jump insn has side effects,
726 we can't kill the edge. */
727 && (!JUMP_P (BB_END (a))
728 || (reload_completed
729 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
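/* Editorial sketch: callers normally reach the predicate above through the
   generic cfghooks wrappers rather than calling the RTL hook directly.  A and
   B are assumed to be two candidate blocks owned by a hypothetical caller.

     if (can_merge_blocks_p (a, b))   // dispatches to rtl_can_merge_blocks
       merge_blocks (a, b);           // dispatches to rtl_merge_blocks
*/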
732 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
733 exist. */
735 rtx
736 block_label (basic_block block)
738 if (block == EXIT_BLOCK_PTR)
739 return NULL_RTX;
741 if (!LABEL_P (BB_HEAD (block)))
743 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
746 return BB_HEAD (block);
749 /* Attempt to perform edge redirection by replacing a possibly complex jump
750 instruction with an unconditional jump, or by removing the jump completely. This can
751 apply only if all edges now point to the same block. The parameters and
752 return values are equivalent to redirect_edge_and_branch. */
754 edge
755 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
757 basic_block src = e->src;
758 rtx insn = BB_END (src), kill_from;
759 rtx set;
760 int fallthru = 0;
762 /* If we are partitioning hot/cold basic blocks, we don't want to
763 mess up unconditional or indirect jumps that cross between hot
764 and cold sections.
766 Basic block partitioning may result in some jumps that appear to
767 be optimizable (or blocks that appear to be mergeable), but which really
768 must be left untouched (they are required to make it safely across
769 partition boundaries). See the comments at the top of
770 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
772 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
773 || BB_PARTITION (src) != BB_PARTITION (target))
774 return NULL;
776 /* We can replace or remove a complex jump only when we have exactly
777 two edges. Also, if we have exactly one outgoing edge, we can
778 redirect that. */
779 if (EDGE_COUNT (src->succs) >= 3
780 /* Verify that all targets will be TARGET. Specifically, the
781 edge that is not E must also go to TARGET. */
782 || (EDGE_COUNT (src->succs) == 2
783 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
784 return NULL;
786 if (!onlyjump_p (insn))
787 return NULL;
788 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
789 return NULL;
791 /* Avoid removing branch with side effects. */
792 set = single_set (insn);
793 if (!set || side_effects_p (set))
794 return NULL;
796 /* In case we zap a conditional jump, we'll need to kill
797 the cc0 setter too. */
798 kill_from = insn;
799 #ifdef HAVE_cc0
800 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
801 && only_sets_cc0_p (PREV_INSN (insn)))
802 kill_from = PREV_INSN (insn);
803 #endif
805 /* See if we can create the fallthru edge. */
806 if (in_cfglayout || can_fallthru (src, target))
808 if (dump_file)
809 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
810 fallthru = 1;
812 /* Selectively unlink whole insn chain. */
813 if (in_cfglayout)
815 rtx insn = src->il.rtl->footer;
817 delete_insn_chain (kill_from, BB_END (src), false);
819 /* Remove barriers but keep jumptables. */
820 while (insn)
822 if (BARRIER_P (insn))
824 if (PREV_INSN (insn))
825 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
826 else
827 src->il.rtl->footer = NEXT_INSN (insn);
828 if (NEXT_INSN (insn))
829 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
831 if (LABEL_P (insn))
832 break;
833 insn = NEXT_INSN (insn);
836 else
837 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
838 false);
841 /* If this already is simplejump, redirect it. */
842 else if (simplejump_p (insn))
844 if (e->dest == target)
845 return NULL;
846 if (dump_file)
847 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
848 INSN_UID (insn), e->dest->index, target->index);
849 if (!redirect_jump (insn, block_label (target), 0))
851 gcc_assert (target == EXIT_BLOCK_PTR);
852 return NULL;
856 /* Cannot do anything for target exit block. */
857 else if (target == EXIT_BLOCK_PTR)
858 return NULL;
860 /* Or replace possibly complicated jump insn by simple jump insn. */
861 else
863 rtx target_label = block_label (target);
864 rtx barrier, label, table;
866 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
867 JUMP_LABEL (BB_END (src)) = target_label;
868 LABEL_NUSES (target_label)++;
869 if (dump_file)
870 fprintf (dump_file, "Replacing insn %i by jump %i\n",
871 INSN_UID (insn), INSN_UID (BB_END (src)));
874 delete_insn_chain (kill_from, insn, false);
876 /* Recognize a tablejump that we are converting to a
877 simple jump and remove its associated CODE_LABEL
878 and ADDR_VEC or ADDR_DIFF_VEC. */
879 if (tablejump_p (insn, &label, &table))
880 delete_insn_chain (label, table, false);
882 barrier = next_nonnote_insn (BB_END (src));
883 if (!barrier || !BARRIER_P (barrier))
884 emit_barrier_after (BB_END (src));
885 else
887 if (barrier != NEXT_INSN (BB_END (src)))
889 /* Move the jump before the barrier so that the notes
890 which originally were, or were created, before the jump table are
891 inside the basic block. */
892 rtx new_insn = BB_END (src);
894 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
895 PREV_INSN (barrier), src);
897 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
898 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
900 NEXT_INSN (new_insn) = barrier;
901 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
903 PREV_INSN (new_insn) = PREV_INSN (barrier);
904 PREV_INSN (barrier) = new_insn;
909 /* Keep only one edge out and set proper flags. */
910 if (!single_succ_p (src))
911 remove_edge (e);
912 gcc_assert (single_succ_p (src));
914 e = single_succ_edge (src);
915 if (fallthru)
916 e->flags = EDGE_FALLTHRU;
917 else
918 e->flags = 0;
920 e->probability = REG_BR_PROB_BASE;
921 e->count = src->count;
923 if (e->dest != target)
924 redirect_edge_succ (e, target);
925 return e;
928 /* Subroutine of redirect_branch_edge that tries to patch the jump
929 instruction INSN so that it reaches block NEW_BB. Do this
930 only when it originally reached the block holding OLD_LABEL. Return true
931 if this worked or the original target wasn't OLD_LABEL; return false if
932 redirection doesn't work. */
934 static bool
935 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
937 rtx tmp;
938 /* Recognize a tablejump and adjust all matching cases. */
939 if (tablejump_p (insn, NULL, &tmp))
941 rtvec vec;
942 int j;
943 rtx new_label = block_label (new_bb);
945 if (new_bb == EXIT_BLOCK_PTR)
946 return false;
947 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
948 vec = XVEC (PATTERN (tmp), 0);
949 else
950 vec = XVEC (PATTERN (tmp), 1);
952 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
953 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
955 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
956 --LABEL_NUSES (old_label);
957 ++LABEL_NUSES (new_label);
960 /* Handle casesi dispatch insns. */
961 if ((tmp = single_set (insn)) != NULL
962 && SET_DEST (tmp) == pc_rtx
963 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
964 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
965 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
967 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
968 new_label);
969 --LABEL_NUSES (old_label);
970 ++LABEL_NUSES (new_label);
973 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
975 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
976 rtx new_label, note;
978 if (new_bb == EXIT_BLOCK_PTR)
979 return false;
980 new_label = block_label (new_bb);
982 for (i = 0; i < n; ++i)
984 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
985 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
986 if (XEXP (old_ref, 0) == old_label)
988 ASM_OPERANDS_LABEL (tmp, i)
989 = gen_rtx_LABEL_REF (Pmode, new_label);
990 --LABEL_NUSES (old_label);
991 ++LABEL_NUSES (new_label);
995 if (JUMP_LABEL (insn) == old_label)
997 JUMP_LABEL (insn) = new_label;
998 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
999 if (note)
1000 remove_note (insn, note);
1002 else
1004 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1005 if (note)
1006 remove_note (insn, note);
1007 if (JUMP_LABEL (insn) != new_label
1008 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1009 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1011 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1012 != NULL_RTX)
1013 XEXP (note, 0) = new_label;
1015 else
1017 /* ?? We may play the games with moving the named labels from
1018 one basic block to the other in case only one computed_jump is
1019 available. */
1020 if (computed_jump_p (insn)
1021 /* A return instruction can't be redirected. */
1022 || returnjump_p (insn))
1023 return false;
1025 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1027 /* If the insn doesn't go where we think, we're confused. */
1028 gcc_assert (JUMP_LABEL (insn) == old_label);
1030 /* If the substitution doesn't succeed, die. This can happen
1031 if the back end emitted unrecognizable instructions or if
1032 target is exit block on some arches. */
1033 if (!redirect_jump (insn, block_label (new_bb), 0))
1035 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1036 return false;
1040 return true;
1044 /* Redirect the edge representing a branch of an (un)conditional jump or
1045 tablejump. Return NULL on failure. */
1046 static edge
1047 redirect_branch_edge (edge e, basic_block target)
1049 rtx old_label = BB_HEAD (e->dest);
1050 basic_block src = e->src;
1051 rtx insn = BB_END (src);
1053 /* We can only redirect non-fallthru edges of jump insn. */
1054 if (e->flags & EDGE_FALLTHRU)
1055 return NULL;
1056 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1057 return NULL;
1059 if (!currently_expanding_to_rtl)
1061 if (!patch_jump_insn (insn, old_label, target))
1062 return NULL;
1064 else
1065 /* When expanding this BB might actually contain multiple
1066 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1067 Redirect all of those that match our label. */
1068 FOR_BB_INSNS (src, insn)
1069 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1070 return NULL;
1072 if (dump_file)
1073 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1074 e->src->index, e->dest->index, target->index);
1076 if (e->dest != target)
1077 e = redirect_edge_succ_nodup (e, target);
1079 return e;
1082 /* Attempt to change code to redirect edge E to TARGET. Don't do that at the
1083 expense of adding new instructions or reordering basic blocks.
1085 The function can also be called with the edge destination equivalent to TARGET.
1086 Then it should try the simplifications and do nothing if none is possible.
1088 Return the edge representing the branch if the transformation succeeded. Return NULL
1089 on failure.
1090 We still return NULL in case E already pointed to TARGET and we didn't
1091 manage to simplify the instruction stream. */
1093 static edge
1094 rtl_redirect_edge_and_branch (edge e, basic_block target)
1096 edge ret;
1097 basic_block src = e->src;
1099 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1100 return NULL;
1102 if (e->dest == target)
1103 return e;
1105 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1107 df_set_bb_dirty (src);
1108 return ret;
1111 ret = redirect_branch_edge (e, target);
1112 if (!ret)
1113 return NULL;
1115 df_set_bb_dirty (src);
1116 return ret;
1119 /* Like force_nonfallthru below, but additionally performs redirection.
1120 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1121 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1122 simple_return_rtx, indicating which kind of returnjump to create.
1123 It should be NULL otherwise. */
1125 basic_block
1126 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1128 basic_block jump_block, new_bb = NULL, src = e->src;
1129 rtx note;
1130 edge new_edge;
1131 int abnormal_edge_flags = 0;
1132 int loc;
1134 /* In case the last instruction is a conditional jump to the next
1135 instruction, first redirect the jump itself and then continue
1136 by creating a basic block afterwards to redirect the fallthru edge. */
1137 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1138 && any_condjump_p (BB_END (e->src))
1139 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1141 rtx note;
1142 edge b = unchecked_make_edge (e->src, target, 0);
1143 bool redirected;
1145 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1146 gcc_assert (redirected);
1148 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1149 if (note)
1151 int prob = INTVAL (XEXP (note, 0));
1153 b->probability = prob;
1154 b->count = e->count * prob / REG_BR_PROB_BASE;
1155 e->probability -= e->probability;
1156 e->count -= b->count;
1157 if (e->probability < 0)
1158 e->probability = 0;
1159 if (e->count < 0)
1160 e->count = 0;
1164 if (e->flags & EDGE_ABNORMAL)
1166 /* Irritating special case - fallthru edge to the same block as abnormal
1167 edge.
1168 We can't redirect an abnormal edge, but we can still split the fallthru
1169 one and create a separate abnormal edge to the original destination.
1170 This allows bb-reorder to make such edge non-fallthru. */
1171 gcc_assert (e->dest == target);
1172 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1173 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1175 else
1177 gcc_assert (e->flags & EDGE_FALLTHRU);
1178 if (e->src == ENTRY_BLOCK_PTR)
1180 /* We can't redirect the entry block. Create an empty block
1181 at the start of the function which we use to add the new
1182 jump. */
1183 edge tmp;
1184 edge_iterator ei;
1185 bool found = false;
1187 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1189 /* Change the existing edge's source to be the new block, and add
1190 a new edge from the entry block to the new block. */
1191 e->src = bb;
1192 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1194 if (tmp == e)
1196 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1197 found = true;
1198 break;
1200 else
1201 ei_next (&ei);
1204 gcc_assert (found);
1206 VEC_safe_push (edge, gc, bb->succs, e);
1207 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1211 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1213 /* Create the new structures. */
1215 /* If the old block ended with a tablejump, skip its table
1216 by searching forward from there. Otherwise start searching
1217 forward from the last instruction of the old block. */
1218 if (!tablejump_p (BB_END (e->src), NULL, &note))
1219 note = BB_END (e->src);
1220 note = NEXT_INSN (note);
1222 jump_block = create_basic_block (note, NULL, e->src);
1223 jump_block->count = e->count;
1224 jump_block->frequency = EDGE_FREQUENCY (e);
1225 jump_block->loop_depth = target->loop_depth;
1227 /* Make sure new block ends up in correct hot/cold section. */
1229 BB_COPY_PARTITION (jump_block, e->src);
1230 if (flag_reorder_blocks_and_partition
1231 && targetm_common.have_named_sections
1232 && JUMP_P (BB_END (jump_block))
1233 && !any_condjump_p (BB_END (jump_block))
1234 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1235 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1237 /* Wire edge in. */
1238 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1239 new_edge->probability = e->probability;
1240 new_edge->count = e->count;
1242 /* Redirect old edge. */
1243 redirect_edge_pred (e, jump_block);
1244 e->probability = REG_BR_PROB_BASE;
1246 new_bb = jump_block;
1248 else
1249 jump_block = e->src;
1251 if (e->goto_locus && e->goto_block == NULL)
1252 loc = e->goto_locus;
1253 else
1254 loc = 0;
1255 e->flags &= ~EDGE_FALLTHRU;
1256 if (target == EXIT_BLOCK_PTR)
1258 if (jump_label == ret_rtx)
1260 #ifdef HAVE_return
1261 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1262 #else
1263 gcc_unreachable ();
1264 #endif
1266 else
1268 gcc_assert (jump_label == simple_return_rtx);
1269 #ifdef HAVE_simple_return
1270 emit_jump_insn_after_setloc (gen_simple_return (),
1271 BB_END (jump_block), loc);
1272 #else
1273 gcc_unreachable ();
1274 #endif
1276 set_return_jump_label (BB_END (jump_block));
1278 else
1280 rtx label = block_label (target);
1281 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1282 JUMP_LABEL (BB_END (jump_block)) = label;
1283 LABEL_NUSES (label)++;
1286 emit_barrier_after (BB_END (jump_block));
1287 redirect_edge_succ_nodup (e, target);
1289 if (abnormal_edge_flags)
1290 make_edge (src, target, abnormal_edge_flags);
1292 df_mark_solutions_dirty ();
1293 return new_bb;
1296 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
1297 (and possibly create a new basic block) to make the edge non-fallthru.
1298 Return the newly created BB or NULL if none. */
1300 static basic_block
1301 rtl_force_nonfallthru (edge e)
1303 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1306 /* Redirect edge even at the expense of creating new jump insn or
1307 basic block. Return new basic block if created, NULL otherwise.
1308 Conversion must be possible. */
1310 static basic_block
1311 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1313 if (redirect_edge_and_branch (e, target)
1314 || e->dest == target)
1315 return NULL;
1317 /* In case the edge redirection failed, try to force it to be non-fallthru
1318 and redirect newly created simplejump. */
1319 df_set_bb_dirty (e->src);
1320 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1323 /* The given edge should potentially be a fallthru edge. If that is in
1324 fact true, delete the jump and barriers that are in the way. */
1326 static void
1327 rtl_tidy_fallthru_edge (edge e)
1329 rtx q;
1330 basic_block b = e->src, c = b->next_bb;
1332 /* ??? In a late-running flow pass, other folks may have deleted basic
1333 blocks by nopping out blocks, leaving multiple BARRIERs between here
1334 and the target label. They ought to be chastised and fixed.
1336 We can also wind up with a sequence of undeletable labels between
1337 one block and the next.
1339 So search through a sequence of barriers, labels, and notes for
1340 the head of block C and assert that we really do fall through. */
1342 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1343 if (INSN_P (q))
1344 return;
1346 /* Remove what will soon cease being the jump insn from the source block.
1347 If block B consisted only of this single jump, turn it into a deleted
1348 note. */
1349 q = BB_END (b);
1350 if (JUMP_P (q)
1351 && onlyjump_p (q)
1352 && (any_uncondjump_p (q)
1353 || single_succ_p (b)))
1355 #ifdef HAVE_cc0
1356 /* If this was a conditional jump, we need to also delete
1357 the insn that set cc0. */
1358 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1359 q = PREV_INSN (q);
1360 #endif
1362 q = PREV_INSN (q);
1365 /* Selectively unlink the sequence. */
1366 if (q != PREV_INSN (BB_HEAD (c)))
1367 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1369 e->flags |= EDGE_FALLTHRU;
1372 /* Should move basic block BB after basic block AFTER. NIY. */
1374 static bool
1375 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1376 basic_block after ATTRIBUTE_UNUSED)
1378 return false;
1381 /* Split a (typically critical) edge. Return the new block.
1382 The edge must not be abnormal.
1384 ??? The code generally expects to be called on critical edges.
1385 The case of a block ending in an unconditional jump to a
1386 block with multiple predecessors is not handled optimally. */
1388 static basic_block
1389 rtl_split_edge (edge edge_in)
1391 basic_block bb;
1392 rtx before;
1394 /* Abnormal edges cannot be split. */
1395 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1397 /* We are going to place the new block in front of edge destination.
1398 Avoid existence of fallthru predecessors. */
1399 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1401 edge e = find_fallthru_edge (edge_in->dest->preds);
1403 if (e)
1404 force_nonfallthru (e);
1407 /* Create the basic block note. */
1408 if (edge_in->dest != EXIT_BLOCK_PTR)
1409 before = BB_HEAD (edge_in->dest);
1410 else
1411 before = NULL_RTX;
1413 /* If this is a fallthru edge to the exit block, the blocks might not be
1414 adjacent, and the right place is after the source. */
1415 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1417 before = NEXT_INSN (BB_END (edge_in->src));
1418 bb = create_basic_block (before, NULL, edge_in->src);
1419 BB_COPY_PARTITION (bb, edge_in->src);
1421 else
1423 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1424 /* ??? Why not edge_in->dest->prev_bb here? */
1425 BB_COPY_PARTITION (bb, edge_in->dest);
1428 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1430 /* For non-fallthru edges, we must adjust the predecessor's
1431 jump instruction to target our new block. */
1432 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1434 edge redirected = redirect_edge_and_branch (edge_in, bb);
1435 gcc_assert (redirected);
1437 else
1439 if (edge_in->src != ENTRY_BLOCK_PTR)
1441 /* For asm goto even splitting of fallthru edge might
1442 need insn patching, as other labels might point to the
1443 old label. */
1444 rtx last = BB_END (edge_in->src);
1445 if (last
1446 && JUMP_P (last)
1447 && edge_in->dest != EXIT_BLOCK_PTR
1448 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1449 && patch_jump_insn (last, before, bb))
1450 df_set_bb_dirty (edge_in->src);
1452 redirect_edge_succ (edge_in, bb);
1455 return bb;
1458 /* Queue instructions for insertion on an edge between two basic blocks.
1459 The new instructions and basic blocks (if any) will not appear in the
1460 CFG until commit_edge_insertions is called. */
1462 void
1463 insert_insn_on_edge (rtx pattern, edge e)
1465 /* We cannot insert instructions on an abnormal critical edge.
1466 It will be easier to find the culprit if we die now. */
1467 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1469 if (e->insns.r == NULL_RTX)
1470 start_sequence ();
1471 else
1472 push_to_sequence (e->insns.r);
1474 emit_insn (pattern);
1476 e->insns.r = get_insns ();
1477 end_sequence ();
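/* Editorial sketch: the usual pattern for queueing a multi-insn sequence on an
   edge is to build it with the sequence API and hand the result to
   insert_insn_on_edge; emit_insn_at_entry above is an in-tree example of the
   same idiom.  DEST, SRC and E are assumed to be provided by the caller.

     rtx seq;

     start_sequence ();
     emit_move_insn (dest, src);   // build whatever insns are needed
     seq = get_insns ();
     end_sequence ();

     insert_insn_on_edge (seq, e);
     // ... later, once all edges have been processed:
     commit_edge_insertions ();
*/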
1480 /* Update the CFG for the instructions queued on edge E. */
1482 void
1483 commit_one_edge_insertion (edge e)
1485 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1486 basic_block bb;
1488 /* Pull the insns off the edge now since the edge might go away. */
1489 insns = e->insns.r;
1490 e->insns.r = NULL_RTX;
1492 /* Figure out where to put these insns. If the destination has
1493 one predecessor, insert there. Except for the exit block. */
1494 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1496 bb = e->dest;
1498 /* Get the location correct wrt a code label, and "nice" wrt
1499 a basic block note, and before everything else. */
1500 tmp = BB_HEAD (bb);
1501 if (LABEL_P (tmp))
1502 tmp = NEXT_INSN (tmp);
1503 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1504 tmp = NEXT_INSN (tmp);
1505 if (tmp == BB_HEAD (bb))
1506 before = tmp;
1507 else if (tmp)
1508 after = PREV_INSN (tmp);
1509 else
1510 after = get_last_insn ();
1513 /* If the source has one successor and the edge is not abnormal,
1514 insert there. Except for the entry block. */
1515 else if ((e->flags & EDGE_ABNORMAL) == 0
1516 && single_succ_p (e->src)
1517 && e->src != ENTRY_BLOCK_PTR)
1519 bb = e->src;
1521 /* It is possible to have a non-simple jump here. Consider a target
1522 where some forms of unconditional jumps clobber a register. This
1523 happens on the fr30 for example.
1525 We know this block has a single successor, so we can just emit
1526 the queued insns before the jump. */
1527 if (JUMP_P (BB_END (bb)))
1528 before = BB_END (bb);
1529 else
1531 /* We'd better be fallthru, or we've lost track of what's what. */
1532 gcc_assert (e->flags & EDGE_FALLTHRU);
1534 after = BB_END (bb);
1538 /* Otherwise we must split the edge. */
1539 else
1541 bb = split_edge (e);
1542 after = BB_END (bb);
1544 if (flag_reorder_blocks_and_partition
1545 && targetm_common.have_named_sections
1546 && e->src != ENTRY_BLOCK_PTR
1547 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1548 && !(e->flags & EDGE_CROSSING)
1549 && JUMP_P (after)
1550 && !any_condjump_p (after)
1551 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1552 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1555 /* Now that we've found the spot, do the insertion. */
1556 if (before)
1558 emit_insn_before_noloc (insns, before, bb);
1559 last = prev_nonnote_insn (before);
1561 else
1562 last = emit_insn_after_noloc (insns, after, bb);
1564 if (returnjump_p (last))
1566 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1567 This is not currently a problem because this only happens
1568 for the (single) epilogue, which already has a fallthru edge
1569 to EXIT. */
1571 e = single_succ_edge (bb);
1572 gcc_assert (e->dest == EXIT_BLOCK_PTR
1573 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1575 e->flags &= ~EDGE_FALLTHRU;
1576 emit_barrier_after (last);
1578 if (before)
1579 delete_insn (before);
1581 else
1582 gcc_assert (!JUMP_P (last));
1585 /* Update the CFG for all queued instructions. */
1587 void
1588 commit_edge_insertions (void)
1590 basic_block bb;
1592 #ifdef ENABLE_CHECKING
1593 verify_flow_info ();
1594 #endif
1596 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1598 edge e;
1599 edge_iterator ei;
1601 FOR_EACH_EDGE (e, ei, bb->succs)
1602 if (e->insns.r)
1603 commit_one_edge_insertion (e);
1608 /* Print out RTL-specific basic block information (live information
1609 at start and end). */
1611 static void
1612 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1614 rtx insn;
1615 rtx last;
1616 char *s_indent;
1618 s_indent = (char *) alloca ((size_t) indent + 1);
1619 memset (s_indent, ' ', (size_t) indent);
1620 s_indent[indent] = '\0';
1622 if (df)
1624 df_dump_top (bb, outf);
1625 putc ('\n', outf);
1628 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1629 insn = NEXT_INSN (insn))
1630 print_rtl_single (outf, insn);
1632 if (df)
1634 df_dump_bottom (bb, outf);
1635 putc ('\n', outf);
1640 /* Like print_rtl, but also print out live information for the start of each
1641 basic block. */
1643 void
1644 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1646 const_rtx tmp_rtx;
1647 if (rtx_first == 0)
1648 fprintf (outf, "(nil)\n");
1649 else
1651 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1652 int max_uid = get_max_uid ();
1653 basic_block *start = XCNEWVEC (basic_block, max_uid);
1654 basic_block *end = XCNEWVEC (basic_block, max_uid);
1655 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1657 basic_block bb;
1659 if (df)
1660 df_dump_start (outf);
1662 FOR_EACH_BB_REVERSE (bb)
1664 rtx x;
1666 start[INSN_UID (BB_HEAD (bb))] = bb;
1667 end[INSN_UID (BB_END (bb))] = bb;
1668 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1670 enum bb_state state = IN_MULTIPLE_BB;
1672 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1673 state = IN_ONE_BB;
1674 in_bb_p[INSN_UID (x)] = state;
1676 if (x == BB_END (bb))
1677 break;
1681 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1683 int did_output;
1685 bb = start[INSN_UID (tmp_rtx)];
1686 if (bb != NULL)
1687 dump_bb_info (bb, true, false, dump_flags, ";; ", outf);
1689 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1690 && !NOTE_P (tmp_rtx)
1691 && !BARRIER_P (tmp_rtx))
1692 fprintf (outf, ";; Insn is not within a basic block\n");
1693 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1694 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1696 did_output = print_rtl_single (outf, tmp_rtx);
1698 bb = end[INSN_UID (tmp_rtx)];
1699 if (bb != NULL)
1700 dump_bb_info (bb, false, true, dump_flags, ";; ", outf);
1701 if (did_output)
1702 putc ('\n', outf);
1705 free (start);
1706 free (end);
1707 free (in_bb_p);
1710 if (crtl->epilogue_delay_list != 0)
1712 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1713 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1714 tmp_rtx = XEXP (tmp_rtx, 1))
1715 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1719 void
1720 update_br_prob_note (basic_block bb)
1722 rtx note;
1723 if (!JUMP_P (BB_END (bb)))
1724 return;
1725 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1726 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1727 return;
1728 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1731 /* Get the last insn associated with block BB (that includes barriers and
1732 tablejumps after BB). */
1733 rtx
1734 get_last_bb_insn (basic_block bb)
1736 rtx tmp;
1737 rtx end = BB_END (bb);
1739 /* Include any jump table following the basic block. */
1740 if (tablejump_p (end, NULL, &tmp))
1741 end = tmp;
1743 /* Include any barriers that may follow the basic block. */
1744 tmp = next_nonnote_insn_bb (end);
1745 while (tmp && BARRIER_P (tmp))
1747 end = tmp;
1748 tmp = next_nonnote_insn_bb (end);
1751 return end;
1754 /* Verify the CFG and RTL consistency common for both underlying RTL and
1755 cfglayout RTL.
1757 Currently it does the following checks:
1759 - overlapping of basic blocks
1760 - insns with wrong BLOCK_FOR_INSN pointers
1761 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1762 - tails of basic blocks (ensure that boundary is necessary)
1763 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1764 and NOTE_INSN_BASIC_BLOCK
1765 - verify that no fall_thru edge crosses hot/cold partition boundaries
1766 - verify that there are no pending RTL branch predictions
1768 In the future it can be extended to check a lot of other stuff as well
1769 (reachability of basic blocks, life information, etc.). */
1771 static int
1772 rtl_verify_flow_info_1 (void)
1774 rtx x;
1775 int err = 0;
1776 basic_block bb;
1778 /* Check the general integrity of the basic blocks. */
1779 FOR_EACH_BB_REVERSE (bb)
1781 rtx insn;
1783 if (!(bb->flags & BB_RTL))
1785 error ("BB_RTL flag not set for block %d", bb->index);
1786 err = 1;
1789 FOR_BB_INSNS (bb, insn)
1790 if (BLOCK_FOR_INSN (insn) != bb)
1792 error ("insn %d basic block pointer is %d, should be %d",
1793 INSN_UID (insn),
1794 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1795 bb->index);
1796 err = 1;
1799 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1800 if (!BARRIER_P (insn)
1801 && BLOCK_FOR_INSN (insn) != NULL)
1803 error ("insn %d in header of bb %d has non-NULL basic block",
1804 INSN_UID (insn), bb->index);
1805 err = 1;
1807 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1808 if (!BARRIER_P (insn)
1809 && BLOCK_FOR_INSN (insn) != NULL)
1811 error ("insn %d in footer of bb %d has non-NULL basic block",
1812 INSN_UID (insn), bb->index);
1813 err = 1;
1817 /* Now check the basic blocks (boundaries etc.) */
1818 FOR_EACH_BB_REVERSE (bb)
1820 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1821 edge e, fallthru = NULL;
1822 rtx note;
1823 edge_iterator ei;
1825 if (JUMP_P (BB_END (bb))
1826 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1827 && EDGE_COUNT (bb->succs) >= 2
1828 && any_condjump_p (BB_END (bb)))
1830 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1831 && profile_status != PROFILE_ABSENT)
1833 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1834 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1835 err = 1;
1838 FOR_EACH_EDGE (e, ei, bb->succs)
1840 bool is_crossing;
1842 if (e->flags & EDGE_FALLTHRU)
1843 n_fallthru++, fallthru = e;
1845 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1846 && e->src != ENTRY_BLOCK_PTR
1847 && e->dest != EXIT_BLOCK_PTR);
1848 if (e->flags & EDGE_CROSSING)
1850 if (!is_crossing)
1852 error ("EDGE_CROSSING incorrectly set across same section");
1853 err = 1;
1855 if (e->flags & EDGE_FALLTHRU)
1857 error ("fallthru edge crosses section boundary (bb %i)",
1858 e->src->index);
1859 err = 1;
1861 if (e->flags & EDGE_EH)
1863 error ("EH edge crosses section boundary (bb %i)",
1864 e->src->index);
1865 err = 1;
1868 else if (is_crossing)
1870 error ("EDGE_CROSSING missing across section boundary");
1871 err = 1;
1874 if ((e->flags & ~(EDGE_DFS_BACK
1875 | EDGE_CAN_FALLTHRU
1876 | EDGE_IRREDUCIBLE_LOOP
1877 | EDGE_LOOP_EXIT
1878 | EDGE_CROSSING
1879 | EDGE_PRESERVE)) == 0)
1880 n_branch++;
1882 if (e->flags & EDGE_ABNORMAL_CALL)
1883 n_call++;
1885 if (e->flags & EDGE_EH)
1886 n_eh++;
1887 else if (e->flags & EDGE_ABNORMAL)
1888 n_abnormal++;
1891 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1893 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1894 err = 1;
1896 if (n_eh > 1)
1898 error ("too many eh edges %i", bb->index);
1899 err = 1;
1901 if (n_branch
1902 && (!JUMP_P (BB_END (bb))
1903 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1904 || any_condjump_p (BB_END (bb))))))
1906 error ("too many outgoing branch edges from bb %i", bb->index);
1907 err = 1;
1909 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1911 error ("fallthru edge after unconditional jump %i", bb->index);
1912 err = 1;
1914 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1916 error ("wrong number of branch edges after unconditional jump %i",
1917 bb->index);
1918 err = 1;
1920 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1921 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1923 error ("wrong amount of branch edges after conditional jump %i",
1924 bb->index);
1925 err = 1;
1927 if (n_call && !CALL_P (BB_END (bb)))
1929 error ("call edges for non-call insn in bb %i", bb->index);
1930 err = 1;
1932 if (n_abnormal
1933 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1934 && (!JUMP_P (BB_END (bb))
1935 || any_condjump_p (BB_END (bb))
1936 || any_uncondjump_p (BB_END (bb))))
1938 error ("abnormal edges for no purpose in bb %i", bb->index);
1939 err = 1;
1942 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1943 /* We may have a barrier inside a basic block before dead code
1944 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1945 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1947 debug_rtx (x);
1948 if (! BLOCK_FOR_INSN (x))
1949 error
1950 ("insn %d inside basic block %d but block_for_insn is NULL",
1951 INSN_UID (x), bb->index);
1952 else
1953 error
1954 ("insn %d inside basic block %d but block_for_insn is %i",
1955 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1957 err = 1;
1960 /* OK, pointers are correct. Now check the header of the basic
1961 block. It ought to contain an optional CODE_LABEL followed
1962 by NOTE_BASIC_BLOCK. */
1963 x = BB_HEAD (bb);
1964 if (LABEL_P (x))
1966 if (BB_END (bb) == x)
1968 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1969 bb->index);
1970 err = 1;
1973 x = NEXT_INSN (x);
1976 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1978 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1979 bb->index);
1980 err = 1;
1983 if (BB_END (bb) == x)
1984 /* Do checks for empty blocks here. */
1986 else
1987 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1989 if (NOTE_INSN_BASIC_BLOCK_P (x))
1991 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1992 INSN_UID (x), bb->index);
1993 err = 1;
1996 if (x == BB_END (bb))
1997 break;
1999 if (control_flow_insn_p (x))
2001 error ("in basic block %d:", bb->index);
2002 fatal_insn ("flow control insn inside a basic block", x);
2007 /* Clean up. */
2008 return err;
2011 /* Verify the CFG and RTL consistency common for both underlying RTL and
2012 cfglayout RTL.
2014 Currently it does the following checks:
2015 - all checks of rtl_verify_flow_info_1
2016 - test head/end pointers
2017 - check that all insns are in the basic blocks
2018 (except the switch handling code, barriers and notes)
2019 - check that all returns are followed by barriers
2020 - check that all fallthru edges point to the adjacent blocks. */
2022 static int
2023 rtl_verify_flow_info (void)
2025 basic_block bb;
2026 int err = rtl_verify_flow_info_1 ();
2027 rtx x;
2028 rtx last_head = get_last_insn ();
2029 basic_block *bb_info;
2030 int num_bb_notes;
2031 const rtx rtx_first = get_insns ();
2032 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2033 const int max_uid = get_max_uid ();
2035 bb_info = XCNEWVEC (basic_block, max_uid);
2037 FOR_EACH_BB_REVERSE (bb)
2039 edge e;
2040 rtx head = BB_HEAD (bb);
2041 rtx end = BB_END (bb);
2043 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2045 /* Verify the end of the basic block is in the INSN chain. */
2046 if (x == end)
2047 break;
2049 /* And that the code outside of basic blocks has NULL bb field. */
2050 if (!BARRIER_P (x)
2051 && BLOCK_FOR_INSN (x) != NULL)
2053 error ("insn %d outside of basic blocks has non-NULL bb field",
2054 INSN_UID (x));
2055 err = 1;
2059 if (!x)
2061 error ("end insn %d for block %d not found in the insn stream",
2062 INSN_UID (end), bb->index);
2063 err = 1;
2066 /* Work backwards from the end to the head of the basic block
2067 to verify the head is in the RTL chain. */
2068 for (; x != NULL_RTX; x = PREV_INSN (x))
2070 /* While walking over the insn chain, verify insns appear
2071 in only one basic block. */
2072 if (bb_info[INSN_UID (x)] != NULL)
2074 error ("insn %d is in multiple basic blocks (%d and %d)",
2075 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2076 err = 1;
2079 bb_info[INSN_UID (x)] = bb;
2081 if (x == head)
2082 break;
2084 if (!x)
2086 error ("head insn %d for block %d not found in the insn stream",
2087 INSN_UID (head), bb->index);
2088 err = 1;
2091 last_head = PREV_INSN (x);
2093 e = find_fallthru_edge (bb->succs);
2094 if (!e)
2096 rtx insn;
2098 /* Ensure existence of barrier in BB with no fallthru edges. */
2099 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2101 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2103 error ("missing barrier after block %i", bb->index);
2104 err = 1;
2105 break;
2107 if (BARRIER_P (insn))
2108 break;
2111 else if (e->src != ENTRY_BLOCK_PTR
2112 && e->dest != EXIT_BLOCK_PTR)
2114 rtx insn;
2116 if (e->src->next_bb != e->dest)
2118 error
2119 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2120 e->src->index, e->dest->index);
2121 err = 1;
2123 else
2124 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2125 insn = NEXT_INSN (insn))
2126 if (BARRIER_P (insn) || INSN_P (insn))
2128 error ("verify_flow_info: Incorrect fallthru %i->%i",
2129 e->src->index, e->dest->index);
2130 fatal_insn ("wrong insn in the fallthru edge", insn);
2131 err = 1;
2136 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2138 /* Check that the code before the first basic block has NULL
2139 bb field. */
2140 if (!BARRIER_P (x)
2141 && BLOCK_FOR_INSN (x) != NULL)
2143 error ("insn %d outside of basic blocks has non-NULL bb field",
2144 INSN_UID (x));
2145 err = 1;
2148 free (bb_info);
2150 num_bb_notes = 0;
2151 last_bb_seen = ENTRY_BLOCK_PTR;
2153 for (x = rtx_first; x; x = NEXT_INSN (x))
2155 if (NOTE_INSN_BASIC_BLOCK_P (x))
2157 bb = NOTE_BASIC_BLOCK (x);
2159 num_bb_notes++;
2160 if (bb != last_bb_seen->next_bb)
2161 internal_error ("basic blocks not laid down consecutively");
2163 curr_bb = last_bb_seen = bb;
2166 if (!curr_bb)
2168 switch (GET_CODE (x))
2170 case BARRIER:
2171 case NOTE:
2172 break;
2174 case CODE_LABEL:
2175 /* An addr_vec is placed outside any basic block. */
2176 if (NEXT_INSN (x)
2177 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2178 x = NEXT_INSN (x);
2180 /* But in any case, non-deletable labels can appear anywhere. */
2181 break;
2183 default:
2184 fatal_insn ("insn outside basic block", x);
2188 if (JUMP_P (x)
2189 && returnjump_p (x) && ! condjump_p (x)
2190 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2191 fatal_insn ("return not followed by barrier", x);
2192 if (curr_bb && x == BB_END (curr_bb))
2193 curr_bb = NULL;
2196 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2197 internal_error
2198 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2199 num_bb_notes, n_basic_blocks);
2201 return err;
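/* rtl_verify_flow_info is installed as the verify_flow_info hook in
   rtl_cfg_hooks below, so it is normally reached through the generic
   verify_flow_info () entry point of cfghooks.c rather than called
   directly.  A minimal sketch of how a pass might guard such a check
   (the surrounding pass code is hypothetical):

     #ifdef ENABLE_CHECKING
     verify_flow_info ();
     #endif
*/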
2204 /* Assume that the preceding pass has possibly eliminated jump instructions
2205 or converted the unconditional jumps. Eliminate the corresponding edges
2206 from the CFG. Return true if any edges were eliminated. */
2208 bool
2209 purge_dead_edges (basic_block bb)
2211 edge e;
2212 rtx insn = BB_END (bb), note;
2213 bool purged = false;
2214 bool found;
2215 edge_iterator ei;
2217 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2219 insn = PREV_INSN (insn);
2220 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2222 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2223 if (NONJUMP_INSN_P (insn)
2224 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2226 rtx eqnote;
2228 if (! may_trap_p (PATTERN (insn))
2229 || ((eqnote = find_reg_equal_equiv_note (insn))
2230 && ! may_trap_p (XEXP (eqnote, 0))))
2231 remove_note (insn, note);
2234 /* Clean up abnormal edges caused by exceptions or non-local gotos. */
2235 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2237 bool remove = false;
2239 /* There are three types of edges we need to handle correctly here: EH
2240 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2241 latter can appear when nonlocal gotos are used. */
2242 if (e->flags & EDGE_ABNORMAL_CALL)
2244 if (!CALL_P (insn))
2245 remove = true;
2246 else if (can_nonlocal_goto (insn))
2247 ;
2248 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2249 ;
2250 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2251 ;
2252 else
2253 remove = true;
2255 else if (e->flags & EDGE_EH)
2256 remove = !can_throw_internal (insn);
2258 if (remove)
2260 remove_edge (e);
2261 df_set_bb_dirty (bb);
2262 purged = true;
2264 else
2265 ei_next (&ei);
2268 if (JUMP_P (insn))
2270 rtx note;
2271 edge b,f;
2272 edge_iterator ei;
2274 /* We care only about conditional jumps and simplejumps. */
2275 if (!any_condjump_p (insn)
2276 && !returnjump_p (insn)
2277 && !simplejump_p (insn))
2278 return purged;
2280 /* Branch probability/prediction notes are defined only for
2281 condjumps. We may have turned a condjump into a simplejump. */
2282 if (simplejump_p (insn))
2284 note = find_reg_note (insn, REG_BR_PROB, NULL);
2285 if (note)
2286 remove_note (insn, note);
2287 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2288 remove_note (insn, note);
2291 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2293 /* Don't let abnormal flags leak from computed jumps turned
2294 into simplejumps. */
2296 e->flags &= ~EDGE_ABNORMAL;
2298 /* See if this edge is one we should keep. */
2299 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2300 /* A conditional jump can fall through into the next
2301 block, so we should keep the edge. */
2303 ei_next (&ei);
2304 continue;
2306 else if (e->dest != EXIT_BLOCK_PTR
2307 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2308 /* If the destination block is the target of the jump,
2309 keep the edge. */
2311 ei_next (&ei);
2312 continue;
2314 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2315 /* If the destination block is the exit block, and this
2316 instruction is a return, then keep the edge. */
2318 ei_next (&ei);
2319 continue;
2321 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2322 /* Keep the edges that correspond to exceptions thrown by
2323 this instruction and rematerialize the EDGE_ABNORMAL
2324 flag we just cleared above. */
2326 e->flags |= EDGE_ABNORMAL;
2327 ei_next (&ei);
2328 continue;
2331 /* We do not need this edge. */
2332 df_set_bb_dirty (bb);
2333 purged = true;
2334 remove_edge (e);
2337 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2338 return purged;
2340 if (dump_file)
2341 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2343 if (!optimize)
2344 return purged;
2346 /* Redistribute probabilities. */
2347 if (single_succ_p (bb))
2349 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2350 single_succ_edge (bb)->count = bb->count;
2352 else
2354 note = find_reg_note (insn, REG_BR_PROB, NULL);
2355 if (!note)
2356 return purged;
2358 b = BRANCH_EDGE (bb);
2359 f = FALLTHRU_EDGE (bb);
2360 b->probability = INTVAL (XEXP (note, 0));
2361 f->probability = REG_BR_PROB_BASE - b->probability;
2362 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2363 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2366 return purged;
2368 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2370 /* First, there should not be any EH or ABCALL edges resulting
2371 from non-local gotos and the like. If there were, we shouldn't
2372 have created the sibcall in the first place. Second, there
2373 should of course never have been a fallthru edge. */
2374 gcc_assert (single_succ_p (bb));
2375 gcc_assert (single_succ_edge (bb)->flags
2376 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2378 return 0;
2381 /* If we don't see a jump insn, we don't know exactly why the block would
2382 have been broken at this point. Look for a simple, non-fallthru edge,
2383 as these are only created by conditional branches. If we find such an
2384 edge we know that there used to be a jump here and can then safely
2385 remove all non-fallthru edges. */
2386 found = false;
2387 FOR_EACH_EDGE (e, ei, bb->succs)
2388 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2390 found = true;
2391 break;
2394 if (!found)
2395 return purged;
2397 /* Remove all but the fake and fallthru edges. The fake edge may be
2398 the only successor for this block in the case of noreturn
2399 calls. */
2400 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2402 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2404 df_set_bb_dirty (bb);
2405 remove_edge (e);
2406 purged = true;
2408 else
2409 ei_next (&ei);
2412 gcc_assert (single_succ_p (bb));
2414 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2415 single_succ_edge (bb)->count = bb->count;
2417 if (dump_file)
2418 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2419 bb->index);
2420 return purged;
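/* A minimal usage sketch (hypothetical caller, not taken from this file):
   after a pass has simplified or deleted the jump ending BB, it can let
   purge_dead_edges drop the successor edges that no longer match the insn
   stream:

     bool changed = purge_dead_edges (bb);
     if (changed && dump_file)
       fprintf (dump_file, "bb %i: dead successor edges purged\n", bb->index);

   Passes that may have touched many blocks typically use
   purge_all_dead_edges (defined below) instead.  */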
2423 /* Search all basic blocks for potentially dead edges and purge them. Return
2424 true if some edge has been eliminated. */
2426 bool
2427 purge_all_dead_edges (void)
2429 int purged = false;
2430 basic_block bb;
2432 FOR_EACH_BB (bb)
2434 bool purged_here = purge_dead_edges (bb);
2436 purged |= purged_here;
2439 return purged;
2442 /* This is used by a few passes that emit some instructions after abnormal
2443 calls, moving the basic block's end, while they in fact do want to emit
2444 them on the fallthru edge. Look for abnormal call edges, search backward
2445 for the call in the block and insert the instructions on the edge instead.
2447 Similarly, handle instructions throwing exceptions internally.
2449 Return true when instructions have been found and inserted on edges. */
2451 bool
2452 fixup_abnormal_edges (void)
2454 bool inserted = false;
2455 basic_block bb;
2457 FOR_EACH_BB (bb)
2459 edge e;
2460 edge_iterator ei;
2462 /* Look for the cases we are interested in: calls or instructions causing
2463 exceptions. */
2464 FOR_EACH_EDGE (e, ei, bb->succs)
2465 if ((e->flags & EDGE_ABNORMAL_CALL)
2466 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2467 == (EDGE_ABNORMAL | EDGE_EH)))
2468 break;
2470 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2472 rtx insn;
2474 /* Get past the new insns generated. Allow notes, as the insns
2475 may already have been deleted. */
2476 insn = BB_END (bb);
2477 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2478 && !can_throw_internal (insn)
2479 && insn != BB_HEAD (bb))
2480 insn = PREV_INSN (insn);
2482 if (CALL_P (insn) || can_throw_internal (insn))
2484 rtx stop, next;
2486 e = find_fallthru_edge (bb->succs);
2488 stop = NEXT_INSN (BB_END (bb));
2489 BB_END (bb) = insn;
2491 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2493 next = NEXT_INSN (insn);
2494 if (INSN_P (insn))
2496 delete_insn (insn);
2498 /* Sometimes there's still the return value USE.
2499 If it's placed after a trapping call (i.e. that
2500 call is the last insn anyway), we have no fallthru
2501 edge. Simply delete this use and don't try to insert
2502 on the non-existent edge. */
2503 if (GET_CODE (PATTERN (insn)) != USE)
2505 /* We're not deleting it, we're moving it. */
2506 INSN_DELETED_P (insn) = 0;
2507 PREV_INSN (insn) = NULL_RTX;
2508 NEXT_INSN (insn) = NULL_RTX;
2510 insert_insn_on_edge (insn, e);
2511 inserted = true;
2514 else if (!BARRIER_P (insn))
2515 set_block_for_insn (insn, NULL);
2519 /* It may be that we don't find any trapping insn. In this
2520 case we discovered quite late that the insn that had been
2521 marked as can_throw_internal in fact couldn't trap at all.
2522 So we should delete the EH edges out of the block. */
2523 else
2524 purge_dead_edges (bb);
2528 return inserted;
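/* Sketch of the expected calling convention (the caller shown is
   hypothetical): since the moved instructions are queued with
   insert_insn_on_edge, the caller is responsible for committing them:

     if (fixup_abnormal_edges ())
       commit_edge_insertions ();
*/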
2531 /* Same as split_block but update cfg_layout structures. */
2533 static basic_block
2534 cfg_layout_split_block (basic_block bb, void *insnp)
2536 rtx insn = (rtx) insnp;
2537 basic_block new_bb = rtl_split_block (bb, insn);
2539 new_bb->il.rtl->footer = bb->il.rtl->footer;
2540 bb->il.rtl->footer = NULL;
2542 return new_bb;
2545 /* Redirect edge E to DEST. */
2546 static edge
2547 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2549 basic_block src = e->src;
2550 edge ret;
2552 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2553 return NULL;
2555 if (e->dest == dest)
2556 return e;
2558 if (e->src != ENTRY_BLOCK_PTR
2559 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2561 df_set_bb_dirty (src);
2562 return ret;
2565 if (e->src == ENTRY_BLOCK_PTR
2566 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2568 if (dump_file)
2569 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2570 e->src->index, dest->index);
2572 df_set_bb_dirty (e->src);
2573 redirect_edge_succ (e, dest);
2574 return e;
2577 /* redirect_edge_and_branch may decide to turn the branch into a fallthru edge
2578 in case the basic blocks appear to be in sequence. Avoid this
2579 transformation. */
2581 if (e->flags & EDGE_FALLTHRU)
2583 /* Redirect any branch edges unified with the fallthru one. */
2584 if (JUMP_P (BB_END (src))
2585 && label_is_jump_target_p (BB_HEAD (e->dest),
2586 BB_END (src)))
2588 edge redirected;
2590 if (dump_file)
2591 fprintf (dump_file, "Fallthru edge unified with branch "
2592 "%i->%i redirected to %i\n",
2593 e->src->index, e->dest->index, dest->index);
2594 e->flags &= ~EDGE_FALLTHRU;
2595 redirected = redirect_branch_edge (e, dest);
2596 gcc_assert (redirected);
2597 redirected->flags |= EDGE_FALLTHRU;
2598 df_set_bb_dirty (redirected->src);
2599 return redirected;
2601 /* In case we are redirecting the fallthru edge to the branch edge
2602 of a conditional jump, remove the jump insn. */
2603 if (EDGE_COUNT (src->succs) == 2)
2605 /* Find the edge that is different from E. */
2606 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2608 if (s->dest == dest
2609 && any_condjump_p (BB_END (src))
2610 && onlyjump_p (BB_END (src)))
2611 delete_insn (BB_END (src));
2613 if (dump_file)
2614 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
2615 e->src->index, e->dest->index, dest->index);
2616 ret = redirect_edge_succ_nodup (e, dest);
2618 else
2619 ret = redirect_branch_edge (e, dest);
2621 /* We don't want simplejumps in the insn stream during cfglayout. */
2622 gcc_assert (!simplejump_p (BB_END (src)));
2624 df_set_bb_dirty (src);
2625 return ret;
2628 /* Simple wrapper, as we can always redirect fallthru edges. */
2629 static basic_block
2630 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2632 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2634 gcc_assert (redirected);
2635 return NULL;
2638 /* Same as delete_basic_block but update cfg_layout structures. */
2640 static void
2641 cfg_layout_delete_block (basic_block bb)
2643 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remains;
2645 if (bb->il.rtl->header)
2647 next = BB_HEAD (bb);
2648 if (prev)
2649 NEXT_INSN (prev) = bb->il.rtl->header;
2650 else
2651 set_first_insn (bb->il.rtl->header);
2652 PREV_INSN (bb->il.rtl->header) = prev;
2653 insn = bb->il.rtl->header;
2654 while (NEXT_INSN (insn))
2655 insn = NEXT_INSN (insn);
2656 NEXT_INSN (insn) = next;
2657 PREV_INSN (next) = insn;
2659 next = NEXT_INSN (BB_END (bb));
2660 if (bb->il.rtl->footer)
2662 insn = bb->il.rtl->footer;
2663 while (insn)
2665 if (BARRIER_P (insn))
2667 if (PREV_INSN (insn))
2668 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2669 else
2670 bb->il.rtl->footer = NEXT_INSN (insn);
2671 if (NEXT_INSN (insn))
2672 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2674 if (LABEL_P (insn))
2675 break;
2676 insn = NEXT_INSN (insn);
2678 if (bb->il.rtl->footer)
2680 insn = BB_END (bb);
2681 NEXT_INSN (insn) = bb->il.rtl->footer;
2682 PREV_INSN (bb->il.rtl->footer) = insn;
2683 while (NEXT_INSN (insn))
2684 insn = NEXT_INSN (insn);
2685 NEXT_INSN (insn) = next;
2686 if (next)
2687 PREV_INSN (next) = insn;
2688 else
2689 set_last_insn (insn);
2692 if (bb->next_bb != EXIT_BLOCK_PTR)
2693 to = &bb->next_bb->il.rtl->header;
2694 else
2695 to = &cfg_layout_function_footer;
2697 rtl_delete_block (bb);
2699 if (prev)
2700 prev = NEXT_INSN (prev);
2701 else
2702 prev = get_insns ();
2703 if (next)
2704 next = PREV_INSN (next);
2705 else
2706 next = get_last_insn ();
2708 if (next && NEXT_INSN (next) != prev)
2710 remains = unlink_insn_chain (prev, next);
2711 insn = remains;
2712 while (NEXT_INSN (insn))
2713 insn = NEXT_INSN (insn);
2714 NEXT_INSN (insn) = *to;
2715 if (*to)
2716 PREV_INSN (*to) = insn;
2717 *to = remains;
2721 /* Return true when blocks A and B can be safely merged. */
2723 static bool
2724 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2726 /* If we are partitioning hot/cold basic blocks, we don't want to
2727 mess up unconditional or indirect jumps that cross between hot
2728 and cold sections.
2730 Basic block partitioning may result in some jumps that appear to
2731 be optimizable (or blocks that appear to be mergeable), but which really
2732 must be left untouched (they are required to make it safely across
2733 partition boundaries). See the comments at the top of
2734 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2736 if (BB_PARTITION (a) != BB_PARTITION (b))
2737 return false;
2739 /* If we would end up moving B's instructions, make sure it doesn't fall
2740 through into the exit block, since we cannot recover from a fallthrough
2741 edge into the exit block occurring in the middle of a function. */
2742 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2744 edge e = find_fallthru_edge (b->succs);
2745 if (e && e->dest == EXIT_BLOCK_PTR)
2746 return false;
2749 /* There must be exactly one edge in between the blocks. */
2750 return (single_succ_p (a)
2751 && single_succ (a) == b
2752 && single_pred_p (b)
2753 && a != b
2754 /* Must be simple edge. */
2755 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2756 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2757 /* If the jump insn has side effects, we can't kill the edge.
2758 When not optimizing, try_redirect_by_replacing_jump will
2759 not allow us to redirect an edge by replacing a table jump. */
2760 && (!JUMP_P (BB_END (a))
2761 || ((!optimize || reload_completed)
2762 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2765 /* Merge block A and B. The blocks must be mergeable. */
2767 static void
2768 cfg_layout_merge_blocks (basic_block a, basic_block b)
2770 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
2772 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
2774 if (dump_file)
2775 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
2776 a->index);
2778 /* If there was a CODE_LABEL beginning B, delete it. */
2779 if (LABEL_P (BB_HEAD (b)))
2781 delete_insn (BB_HEAD (b));
2784 /* We should have a fallthru edge in A, or we can do a dummy redirection to get
2785 it cleaned up. */
2786 if (JUMP_P (BB_END (a)))
2787 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2788 gcc_assert (!JUMP_P (BB_END (a)));
2790 /* When not optimizing and the edge is the only place in RTL which holds
2791 some unique locus, emit a nop with that locus in between. */
2792 if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
2794 rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
2795 int goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2797 while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
2798 insn = PREV_INSN (insn);
2799 if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
2800 goto_locus = 0;
2801 else
2803 insn = BB_HEAD (b);
2804 end = NEXT_INSN (BB_END (b));
2805 while (insn != end && !INSN_P (insn))
2806 insn = NEXT_INSN (insn);
2807 if (insn != end && INSN_LOCATOR (insn) != 0
2808 && locator_eq (INSN_LOCATOR (insn), goto_locus))
2809 goto_locus = 0;
2811 if (goto_locus)
2813 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
2814 INSN_LOCATOR (BB_END (a)) = goto_locus;
2818 /* Possible line number notes should appear in between. */
2819 if (b->il.rtl->header)
2821 rtx first = BB_END (a), last;
2823 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2824 delete_insn_chain (NEXT_INSN (first), last, false);
2825 b->il.rtl->header = NULL;
2828 /* If the basic blocks are not adjacent, move them around. */
2829 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2831 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2833 emit_insn_after_noloc (first, BB_END (a), a);
2834 /* Skip possible DELETED_LABEL insn. */
2835 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2836 first = NEXT_INSN (first);
2837 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2838 BB_HEAD (b) = NULL;
2840 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2841 We need to call it explicitly. */
2842 update_bb_for_insn_chain (NEXT_INSN (first),
2843 BB_END (b),
2844 a);
2846 delete_insn (first);
2848 /* Otherwise just re-associate the instructions. */
2849 else
2851 rtx insn;
2853 update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);
2855 insn = BB_HEAD (b);
2856 /* Skip possible DELETED_LABEL insn. */
2857 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2858 insn = NEXT_INSN (insn);
2859 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2860 BB_HEAD (b) = NULL;
2861 BB_END (a) = BB_END (b);
2862 delete_insn (insn);
2865 df_bb_delete (b->index);
2867 /* Possible tablejumps and barriers should appear after the block. */
2868 if (b->il.rtl->footer)
2870 if (!a->il.rtl->footer)
2871 a->il.rtl->footer = b->il.rtl->footer;
2872 else
2874 rtx last = a->il.rtl->footer;
2876 while (NEXT_INSN (last))
2877 last = NEXT_INSN (last);
2878 NEXT_INSN (last) = b->il.rtl->footer;
2879 PREV_INSN (b->il.rtl->footer) = last;
2881 b->il.rtl->footer = NULL;
2884 /* If B was a forwarder block, propagate the locus on the edge. */
2885 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
2886 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2888 if (dump_file)
2889 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
2892 /* Split edge E. */
2894 static basic_block
2895 cfg_layout_split_edge (edge e)
2897 basic_block new_bb =
2898 create_basic_block (e->src != ENTRY_BLOCK_PTR
2899 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2900 NULL_RTX, e->src);
2902 if (e->dest == EXIT_BLOCK_PTR)
2903 BB_COPY_PARTITION (new_bb, e->src);
2904 else
2905 BB_COPY_PARTITION (new_bb, e->dest);
2906 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2907 redirect_edge_and_branch_force (e, new_bb);
2909 return new_bb;
2912 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2914 static void
2915 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2919 /* Return 1 if BB ends with a call, possibly followed by some
2920 instructions that must stay with the call, 0 otherwise. */
2922 static bool
2923 rtl_block_ends_with_call_p (basic_block bb)
2925 rtx insn = BB_END (bb);
2927 while (!CALL_P (insn)
2928 && insn != BB_HEAD (bb)
2929 && (keep_with_call_p (insn)
2930 || NOTE_P (insn)
2931 || DEBUG_INSN_P (insn)))
2932 insn = PREV_INSN (insn);
2933 return (CALL_P (insn));
2936 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2938 static bool
2939 rtl_block_ends_with_condjump_p (const_basic_block bb)
2941 return any_condjump_p (BB_END (bb));
2944 /* Return true if we need to add a fake edge to the exit block.
2945 Helper function for rtl_flow_call_edges_add. */
2947 static bool
2948 need_fake_edge_p (const_rtx insn)
2950 if (!INSN_P (insn))
2951 return false;
2953 if ((CALL_P (insn)
2954 && !SIBLING_CALL_P (insn)
2955 && !find_reg_note (insn, REG_NORETURN, NULL)
2956 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2957 return true;
2959 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2960 && MEM_VOLATILE_P (PATTERN (insn)))
2961 || (GET_CODE (PATTERN (insn)) == PARALLEL
2962 && asm_noperands (insn) != -1
2963 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2964 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
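/* For illustration: a plain call that might not return to the fallthru
   path (e.g. because the callee can call longjmp or exit) needs a fake
   edge, while a sibling call, a call marked REG_NORETURN, or a const/pure
   call does not; volatile asm statements are treated like such calls.  */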
2967 /* Add fake edges to the function exit for any non-constant and non-noreturn
2968 calls, and for volatile inline assembly, in the bitmap of blocks specified by
2969 BLOCKS, or to the whole CFG if BLOCKS is zero. Return the number of blocks
2970 that were split.
2972 The goal is to expose cases in which entering a basic block does not imply
2973 that all subsequent instructions must be executed. */
2975 static int
2976 rtl_flow_call_edges_add (sbitmap blocks)
2978 int i;
2979 int blocks_split = 0;
2980 int last_bb = last_basic_block;
2981 bool check_last_block = false;
2983 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2984 return 0;
2986 if (! blocks)
2987 check_last_block = true;
2988 else
2989 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2991 /* In the last basic block, before epilogue generation, there will be
2992 a fallthru edge to EXIT. Special care is required if the last insn
2993 of the last basic block is a call because make_edge folds duplicate
2994 edges, which would cause the fallthru edge to also be marked fake,
2995 then removed by remove_fake_edges, leaving an invalid CFG.
2998 Moreover, we can't elide the outgoing fake edge, since the block
2999 profiler needs to take this into account in order to solve the minimal
3000 spanning tree in the case that the call doesn't return.
3002 Handle this by adding a dummy instruction in a new last basic block. */
3003 if (check_last_block)
3005 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
3006 rtx insn = BB_END (bb);
3008 /* Back up past insns that must be kept in the same block as a call. */
3009 while (insn != BB_HEAD (bb)
3010 && keep_with_call_p (insn))
3011 insn = PREV_INSN (insn);
3013 if (need_fake_edge_p (insn))
3015 edge e;
3017 e = find_edge (bb, EXIT_BLOCK_PTR);
3018 if (e)
3020 insert_insn_on_edge (gen_use (const0_rtx), e);
3021 commit_edge_insertions ();
3026 /* Now add fake edges to the function exit for any non-constant
3027 calls since there is no way that we can determine if they will
3028 return or not... */
3030 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
3032 basic_block bb = BASIC_BLOCK (i);
3033 rtx insn;
3034 rtx prev_insn;
3036 if (!bb)
3037 continue;
3039 if (blocks && !TEST_BIT (blocks, i))
3040 continue;
3042 for (insn = BB_END (bb); ; insn = prev_insn)
3044 prev_insn = PREV_INSN (insn);
3045 if (need_fake_edge_p (insn))
3047 edge e;
3048 rtx split_at_insn = insn;
3050 /* Don't split the block between a call and an insn that should
3051 remain in the same block as the call. */
3052 if (CALL_P (insn))
3053 while (split_at_insn != BB_END (bb)
3054 && keep_with_call_p (NEXT_INSN (split_at_insn)))
3055 split_at_insn = NEXT_INSN (split_at_insn);
3057 /* The handling above of the final block before the epilogue
3058 should be enough to verify that there is no edge to the exit
3059 block in CFG already. Calling make_edge in such case would
3060 cause us to mark that edge as fake and remove it later. */
3062 #ifdef ENABLE_CHECKING
3063 if (split_at_insn == BB_END (bb))
3065 e = find_edge (bb, EXIT_BLOCK_PTR);
3066 gcc_assert (e == NULL);
3068 #endif
3070 /* Note that the following may create a new basic block
3071 and renumber the existing basic blocks. */
3072 if (split_at_insn != BB_END (bb))
3074 e = split_block (bb, split_at_insn);
3075 if (e)
3076 blocks_split++;
3079 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
3082 if (insn == BB_HEAD (bb))
3083 break;
3087 if (blocks_split)
3088 verify_flow_info ();
3090 return blocks_split;
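/* A minimal usage sketch, assuming the generic flow_call_edges_add wrapper
   from cfghooks.c (the caller shown is hypothetical).  Passing a NULL
   bitmap requests fake exit edges for the whole CFG:

     int n_split = flow_call_edges_add (NULL);
     if (n_split)
       ... recompute any information that depended on block numbering ...
*/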
3093 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
3094 the conditional branch target, SECOND_HEAD should be the fall-thru;
3095 there is no need to handle the fall-thru here, the loop versioning
3096 code handles it. SECOND_HEAD is still passed because the tree form of
3097 this hook needs it, and both forms must share the same type. */
3098 static void
3099 rtl_lv_add_condition_to_bb (basic_block first_head ,
3100 basic_block second_head ATTRIBUTE_UNUSED,
3101 basic_block cond_bb, void *comp_rtx)
3103 rtx label, seq, jump;
3104 rtx op0 = XEXP ((rtx)comp_rtx, 0);
3105 rtx op1 = XEXP ((rtx)comp_rtx, 1);
3106 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3107 enum machine_mode mode;
3110 label = block_label (first_head);
3111 mode = GET_MODE (op0);
3112 if (mode == VOIDmode)
3113 mode = GET_MODE (op1);
3115 start_sequence ();
3116 op0 = force_operand (op0, NULL_RTX);
3117 op1 = force_operand (op1, NULL_RTX);
3118 do_compare_rtx_and_jump (op0, op1, comp, 0,
3119 mode, NULL_RTX, NULL_RTX, label, -1);
3120 jump = get_last_insn ();
3121 JUMP_LABEL (jump) = label;
3122 LABEL_NUSES (label)++;
3123 seq = get_insns ();
3124 end_sequence ();
3126 /* Add the new condition in the new head. */
3127 emit_insn_after (seq, BB_END (cond_bb));
3131 /* Given a block B that ends with a conditional branch, store the
3132 branch edge in BRANCH_EDGE and the fall-thru edge in
3133 FALLTHRU_EDGE. */
3134 static void
3135 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3136 edge *fallthru_edge)
3138 edge e = EDGE_SUCC (b, 0);
3140 if (e->flags & EDGE_FALLTHRU)
3142 *fallthru_edge = e;
3143 *branch_edge = EDGE_SUCC (b, 1);
3145 else
3147 *branch_edge = e;
3148 *fallthru_edge = EDGE_SUCC (b, 1);
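/* Usage sketch (hypothetical caller), going through the generic
   extract_cond_bb_edges wrapper of cfghooks.c on a block that ends in a
   conditional jump:

     edge branch_e, fallthru_e;
     extract_cond_bb_edges (cond_bb, &branch_e, &fallthru_e);
*/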
3152 void
3153 init_rtl_bb_info (basic_block bb)
3155 gcc_assert (!bb->il.rtl);
3156 bb->il.rtl = ggc_alloc_cleared_rtl_bb_info ();
3159 /* Returns true if it is possible to remove edge E by redirecting
3160 it to the destination of the other edge from E->src. */
3162 static bool
3163 rtl_can_remove_branch_p (const_edge e)
3165 const_basic_block src = e->src;
3166 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3167 const_rtx insn = BB_END (src), set;
3169 /* The conditions are taken from try_redirect_by_replacing_jump. */
3170 if (target == EXIT_BLOCK_PTR)
3171 return false;
3173 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3174 return false;
3176 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3177 || BB_PARTITION (src) != BB_PARTITION (target))
3178 return false;
3180 if (!onlyjump_p (insn)
3181 || tablejump_p (insn, NULL, NULL))
3182 return false;
3184 set = single_set (insn);
3185 if (!set || side_effects_p (set))
3186 return false;
3188 return true;
3191 /* We do not want to declare these functions in a header file, since they
3192 should only be used through the cfghooks interface, and we do not want to
3193 move them here since it would require also moving quite a lot of related
3194 code. They are in cfglayout.c. */
3195 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3196 extern basic_block cfg_layout_duplicate_bb (basic_block);
3198 static basic_block
3199 rtl_duplicate_bb (basic_block bb)
3201 bb = cfg_layout_duplicate_bb (bb);
3202 bb->aux = NULL;
3203 return bb;
3206 /* Implementation of CFG manipulation for linearized RTL. */
3207 struct cfg_hooks rtl_cfg_hooks = {
3208 "rtl",
3209 rtl_verify_flow_info,
3210 rtl_dump_bb,
3211 rtl_create_basic_block,
3212 rtl_redirect_edge_and_branch,
3213 rtl_redirect_edge_and_branch_force,
3214 rtl_can_remove_branch_p,
3215 rtl_delete_block,
3216 rtl_split_block,
3217 rtl_move_block_after,
3218 rtl_can_merge_blocks, /* can_merge_blocks_p */
3219 rtl_merge_blocks,
3220 rtl_predict_edge,
3221 rtl_predicted_by_p,
3222 cfg_layout_can_duplicate_bb_p,
3223 rtl_duplicate_bb,
3224 rtl_split_edge,
3225 rtl_make_forwarder_block,
3226 rtl_tidy_fallthru_edge,
3227 rtl_force_nonfallthru,
3228 rtl_block_ends_with_call_p,
3229 rtl_block_ends_with_condjump_p,
3230 rtl_flow_call_edges_add,
3231 NULL, /* execute_on_growing_pred */
3232 NULL, /* execute_on_shrinking_pred */
3233 NULL, /* duplicate loop for trees */
3234 NULL, /* lv_add_condition_to_bb */
3235 NULL, /* lv_adjust_loop_header_phi*/
3236 NULL, /* extract_cond_bb_edges */
3237 NULL /* flush_pending_stmts */
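/* Passes do not normally reference this table directly; they install it, or
   the cfglayout variant below, through the registration helpers defined in
   cfghooks.c.  A sketch, assuming the usual helpers:

     rtl_register_cfg_hooks ();              (linearized RTL)
     cfg_layout_rtl_register_cfg_hooks ();   (cfglayout mode)
*/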
3240 /* Implementation of CFG manipulation for cfg layout RTL, where
3241 basic blocks connected via fallthru edges do not have to be adjacent.
3242 This representation will hopefully become the default one in a future
3243 version of the compiler. */
3245 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3246 "cfglayout mode",
3247 rtl_verify_flow_info_1,
3248 rtl_dump_bb,
3249 cfg_layout_create_basic_block,
3250 cfg_layout_redirect_edge_and_branch,
3251 cfg_layout_redirect_edge_and_branch_force,
3252 rtl_can_remove_branch_p,
3253 cfg_layout_delete_block,
3254 cfg_layout_split_block,
3255 rtl_move_block_after,
3256 cfg_layout_can_merge_blocks_p,
3257 cfg_layout_merge_blocks,
3258 rtl_predict_edge,
3259 rtl_predicted_by_p,
3260 cfg_layout_can_duplicate_bb_p,
3261 cfg_layout_duplicate_bb,
3262 cfg_layout_split_edge,
3263 rtl_make_forwarder_block,
3264 NULL, /* tidy_fallthru_edge */
3265 rtl_force_nonfallthru,
3266 rtl_block_ends_with_call_p,
3267 rtl_block_ends_with_condjump_p,
3268 rtl_flow_call_edges_add,
3269 NULL, /* execute_on_growing_pred */
3270 NULL, /* execute_on_shrinking_pred */
3271 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3272 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3273 NULL, /* lv_adjust_loop_header_phi*/
3274 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3275 NULL /* flush_pending_stmts */