Fix a bug that broke -freorder-functions
[official-gcc.git] / gcc / cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
36 Functions not intended for generic use:
37 - Infrastructure to quickly determine the basic block for an insn
38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
39 - Edge redirection with updating and optimizing of insn chain
40 block_label, tidy_fallthru_edge, force_nonfallthru */
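/* Illustrative sketch, for exposition only: how a pass might use the
   edge-insertion API listed above (insert_insn_on_edge followed by
   commit_edge_insertions).  The function name and the no-op USE pattern are
   hypothetical placeholders; only the cfgrtl/basic-block calls themselves are
   real.  Guarded by "#if 0" so the file still compiles.  */
#if 0
static void
example_insert_on_all_edges (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_ABNORMAL))
	  /* Queue a placeholder pattern on the edge; nothing is emitted into
	     the insn stream until commit_edge_insertions is called.  */
	  insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
    }

  /* Materialize every queued insn, splitting edges where needed.  */
  commit_edge_insertions ();
}
#endif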
42 #include "config.h"
43 #include "system.h"
44 #include "coretypes.h"
45 #include "tm.h"
46 #include "tree.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "output.h"
52 #include "function.h"
53 #include "except.h"
54 #include "rtl-error.h"
55 #include "tm_p.h"
56 #include "obstack.h"
57 #include "insn-attr.h"
58 #include "insn-config.h"
59 #include "cfglayout.h"
60 #include "expr.h"
61 #include "target.h"
62 #include "common/common-target.h"
63 #include "cfgloop.h"
64 #include "ggc.h"
65 #include "tree-pass.h"
66 #include "df.h"
68 static int can_delete_note_p (const_rtx);
69 static int can_delete_label_p (const_rtx);
70 static basic_block rtl_split_edge (edge);
71 static bool rtl_move_block_after (basic_block, basic_block);
72 static int rtl_verify_flow_info (void);
73 static basic_block cfg_layout_split_block (basic_block, void *);
74 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
75 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
76 static void cfg_layout_delete_block (basic_block);
77 static void rtl_delete_block (basic_block);
78 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
79 static edge rtl_redirect_edge_and_branch (edge, basic_block);
80 static basic_block rtl_split_block (basic_block, void *);
81 static void rtl_dump_bb (basic_block, FILE *, int, int);
82 static int rtl_verify_flow_info_1 (void);
83 static void rtl_make_forwarder_block (edge);
85 /* Return true if NOTE is not one of the ones that must be kept paired,
86 so that we may simply delete it. */
88 static int
89 can_delete_note_p (const_rtx note)
91 switch (NOTE_KIND (note))
93 case NOTE_INSN_DELETED:
94 case NOTE_INSN_BASIC_BLOCK:
95 case NOTE_INSN_EPILOGUE_BEG:
96 return true;
98 default:
99 return false;
103 /* True if a given label can be deleted. */
105 static int
106 can_delete_label_p (const_rtx label)
108 return (!LABEL_PRESERVE_P (label)
109 /* User declared labels must be preserved. */
110 && LABEL_NAME (label) == 0
111 && !in_expr_list_p (forced_labels, label));
114 /* Delete INSN by patching it out. Return the next insn. */
116 rtx
117 delete_insn (rtx insn)
119 rtx next = NEXT_INSN (insn);
120 rtx note;
121 bool really_delete = true;
123 if (LABEL_P (insn))
125 /* Some labels can't be directly removed from the INSN chain, as they
126 might be referenced via variables, the constant pool, etc.
127 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
128 if (! can_delete_label_p (insn))
130 const char *name = LABEL_NAME (insn);
132 really_delete = false;
133 PUT_CODE (insn, NOTE);
134 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
135 NOTE_DELETED_LABEL_NAME (insn) = name;
138 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
141 if (really_delete)
143 /* If this insn has already been deleted, something is very wrong. */
144 gcc_assert (!INSN_DELETED_P (insn));
145 remove_insn (insn);
146 INSN_DELETED_P (insn) = 1;
149 /* If deleting a jump, decrement the use count of the label. Deleting
150 the label itself should happen in the normal course of block merging. */
151 if (JUMP_P (insn))
153 if (JUMP_LABEL (insn)
154 && LABEL_P (JUMP_LABEL (insn)))
155 LABEL_NUSES (JUMP_LABEL (insn))--;
157 /* If there are more targets, remove them too. */
158 while ((note
159 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
160 && LABEL_P (XEXP (note, 0)))
162 LABEL_NUSES (XEXP (note, 0))--;
163 remove_note (insn, note);
167 /* Also if deleting any insn that references a label as an operand. */
168 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
169 && LABEL_P (XEXP (note, 0)))
171 LABEL_NUSES (XEXP (note, 0))--;
172 remove_note (insn, note);
175 if (JUMP_TABLE_DATA_P (insn))
177 rtx pat = PATTERN (insn);
178 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
179 int len = XVECLEN (pat, diff_vec_p);
180 int i;
182 for (i = 0; i < len; i++)
184 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
186 /* When deleting code in bulk (e.g. removing many unreachable
187 blocks) we can delete a label that's a target of the vector
188 before deleting the vector itself. */
189 if (!NOTE_P (label))
190 LABEL_NUSES (label)--;
194 return next;
197 /* Like delete_insn but also purge dead edges from BB. */
199 rtx
200 delete_insn_and_edges (rtx insn)
202 rtx x;
203 bool purge = false;
205 if (INSN_P (insn)
206 && BLOCK_FOR_INSN (insn)
207 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
208 purge = true;
209 x = delete_insn (insn);
210 if (purge)
211 purge_dead_edges (BLOCK_FOR_INSN (insn));
212 return x;
215 /* Unlink a chain of insns between START and FINISH, leaving notes
216 that must be paired. If CLEAR_BB is true, we set bb field for
217 insns that cannot be removed to NULL. */
219 void
220 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
222 rtx next;
224 /* Unchain the insns one by one. It would be quicker to delete all of these
225 with a single unchaining, rather than one at a time, but we need to keep
226 the NOTE's. */
227 while (1)
229 next = NEXT_INSN (start);
230 if (NOTE_P (start) && !can_delete_note_p (start))
231 ;
232 else
233 next = delete_insn (start);
235 if (clear_bb && !INSN_DELETED_P (start))
236 set_block_for_insn (start, NULL);
238 if (start == finish)
239 break;
240 start = next;
244 /* Create a new basic block consisting of the instructions between HEAD and END
245 inclusive. This function is designed to allow fast BB construction - it reuses
246 the note and basic block struct in BB_NOTE, if any, does not grow the
247 BASIC_BLOCK chain, and should be used directly only by CFG construction code.
248 END can be NULL to create a new empty basic block before HEAD. Both END
249 and HEAD can be NULL to create a basic block at the end of the INSN chain.
250 AFTER is the basic block the new block should be put after. */
252 basic_block
253 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
255 basic_block bb;
257 if (bb_note
258 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 && bb->aux == NULL)
261 /* If we found an existing note, thread it back onto the chain. */
263 rtx after;
265 if (LABEL_P (head))
266 after = head;
267 else
269 after = PREV_INSN (head);
270 head = bb_note;
273 if (after != bb_note && NEXT_INSN (after) != bb_note)
274 reorder_insns_nobb (bb_note, bb_note, after);
276 else
278 /* Otherwise we must create a note and a basic block structure. */
280 bb = alloc_block ();
282 init_rtl_bb_info (bb);
283 if (!head && !end)
284 head = end = bb_note
285 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
286 else if (LABEL_P (head) && end)
288 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
289 if (head == end)
290 end = bb_note;
292 else
294 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
295 head = bb_note;
296 if (!end)
297 end = head;
300 NOTE_BASIC_BLOCK (bb_note) = bb;
303 /* Always include the bb note in the block. */
304 if (NEXT_INSN (end) == bb_note)
305 end = bb_note;
307 BB_HEAD (bb) = head;
308 BB_END (bb) = end;
309 bb->index = last_basic_block++;
310 bb->flags = BB_NEW | BB_RTL;
311 link_block (bb, after);
312 SET_BASIC_BLOCK (bb->index, bb);
313 df_bb_refs_record (bb->index, false);
314 update_bb_for_insn (bb);
315 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
317 /* Tag the block so that we know it has been used when considering
318 other basic block notes. */
319 bb->aux = bb;
321 return bb;
324 /* Create a new basic block consisting of the instructions in between HEAD and END
325 and place it in the BB chain after block AFTER. END can be NULL to
326 create a new empty basic block before HEAD. Both END and HEAD can be NULL to
327 create a basic block at the end of the INSN chain. */
329 static basic_block
330 rtl_create_basic_block (void *headp, void *endp, basic_block after)
332 rtx head = (rtx) headp, end = (rtx) endp;
333 basic_block bb;
335 /* Grow the basic block array if needed. */
336 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
338 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
339 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
342 n_basic_blocks++;
344 bb = create_basic_block_structure (head, end, NULL, after);
345 bb->aux = NULL;
346 return bb;
349 static basic_block
350 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
352 basic_block newbb = rtl_create_basic_block (head, end, after);
354 return newbb;
357 /* Delete the insns in a (non-live) block. We physically delete every
358 non-deleted-note insn, and update the flow graph appropriately. */
362 /* ??? Preserving all such notes strikes me as wrong. It would be nice
363 to post-process the stream to remove empty blocks, loops, ranges, etc. */
365 static void
366 rtl_delete_block (basic_block b)
368 rtx insn, end;
370 /* If the head of this block is a CODE_LABEL, then it might be the
371 label for an exception handler which can't be reached. We need
372 to remove the label from the exception_handler_label list. */
373 insn = BB_HEAD (b);
375 end = get_last_bb_insn (b);
377 /* Selectively delete the entire chain. */
378 BB_HEAD (b) = NULL;
379 delete_insn_chain (insn, end, true);
382 if (dump_file)
383 fprintf (dump_file, "deleting block %d\n", b->index);
384 df_bb_delete (b->index);
387 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
389 void
390 compute_bb_for_insn (void)
392 basic_block bb;
394 FOR_EACH_BB (bb)
396 rtx end = BB_END (bb);
397 rtx insn;
399 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
401 BLOCK_FOR_INSN (insn) = bb;
402 if (insn == end)
403 break;
408 /* Release the basic_block_for_insn array. */
410 unsigned int
411 free_bb_for_insn (void)
413 rtx insn;
414 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
415 if (!BARRIER_P (insn))
416 BLOCK_FOR_INSN (insn) = NULL;
417 return 0;
420 static unsigned int
421 rest_of_pass_free_cfg (void)
423 #ifdef DELAY_SLOTS
424 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
425 valid at that point so it would be too late to call df_analyze. */
426 if (optimize > 0 && flag_delayed_branch)
428 df_note_add_problem ();
429 df_analyze ();
431 #endif
433 free_bb_for_insn ();
434 return 0;
437 struct rtl_opt_pass pass_free_cfg =
438 {
439  {
440 RTL_PASS,
441 "*free_cfg", /* name */
442 NULL, /* gate */
443 rest_of_pass_free_cfg, /* execute */
444 NULL, /* sub */
445 NULL, /* next */
446 0, /* static_pass_number */
447 TV_NONE, /* tv_id */
448 0, /* properties_required */
449 0, /* properties_provided */
450 PROP_cfg, /* properties_destroyed */
451 0, /* todo_flags_start */
452 0, /* todo_flags_finish */
453  }
454 };
456 /* Return the RTX after which we want to emit code at the entry of the function. */
457 rtx
458 entry_of_function (void)
460 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
461 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
464 /* Emit INSN at the entry point of the function, ensuring that it is only
465 executed once per function. */
466 void
467 emit_insn_at_entry (rtx insn)
469 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
470 edge e = ei_safe_edge (ei);
471 gcc_assert (e->flags & EDGE_FALLTHRU);
473 insert_insn_on_edge (insn, e);
474 commit_edge_insertions ();
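/* Illustrative sketch, for exposition only: the typical use of
   emit_insn_at_entry above is queueing a single pattern that is guaranteed to
   execute exactly once on function entry.  The USE pattern is a hypothetical
   placeholder; instrumentation passes would emit something real instead.  */
#if 0
static void
example_instrument_entry (void)
{
  /* The insn is queued on the single fallthru edge out of the entry block and
     committed immediately by emit_insn_at_entry.  */
  emit_insn_at_entry (gen_rtx_USE (VOIDmode, const0_rtx));
}
#endif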
477 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
478 (or BARRIER if found) and notify df of the bb change.
479 The insn chain range is inclusive
480 (i.e. both BEGIN and END will be updated). */
482 static void
483 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
485 rtx insn;
487 end = NEXT_INSN (end);
488 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
489 if (!BARRIER_P (insn))
490 df_insn_change_bb (insn, bb);
493 /* Update BLOCK_FOR_INSN of insns in BB to BB,
494 and notify df of the change. */
496 void
497 update_bb_for_insn (basic_block bb)
499 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
503 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
504 note associated with the BLOCK. */
506 static rtx
507 first_insn_after_basic_block_note (basic_block block)
509 rtx insn;
511 /* Get the first instruction in the block. */
512 insn = BB_HEAD (block);
514 if (insn == NULL_RTX)
515 return NULL_RTX;
516 if (LABEL_P (insn))
517 insn = NEXT_INSN (insn);
518 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
520 return NEXT_INSN (insn);
523 /* Creates a new basic block just after basic block B by splitting
524 everything after specified instruction I. */
526 static basic_block
527 rtl_split_block (basic_block bb, void *insnp)
529 basic_block new_bb;
530 rtx insn = (rtx) insnp;
531 edge e;
532 edge_iterator ei;
534 if (!insn)
536 insn = first_insn_after_basic_block_note (bb);
538 if (insn)
540 rtx next = insn;
542 insn = PREV_INSN (insn);
544 /* If the block contains only debug insns, insn would have
545 been NULL in a non-debug compilation, and then we'd end
546 up emitting a DELETED note. For -fcompare-debug
547 stability, emit the note too. */
548 if (insn != BB_END (bb)
549 && DEBUG_INSN_P (next)
550 && DEBUG_INSN_P (BB_END (bb)))
552 while (next != BB_END (bb) && DEBUG_INSN_P (next))
553 next = NEXT_INSN (next);
555 if (next == BB_END (bb))
556 emit_note_after (NOTE_INSN_DELETED, next);
559 else
560 insn = get_last_insn ();
563 /* We probably should check the type of the insn so that we do not create
564 an inconsistent cfg. It is checked in verify_flow_info anyway, so do not
565 bother. */
566 if (insn == BB_END (bb))
567 emit_note_after (NOTE_INSN_DELETED, insn);
569 /* Create the new basic block. */
570 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
571 BB_COPY_PARTITION (new_bb, bb);
572 BB_END (bb) = insn;
574 /* Redirect the outgoing edges. */
575 new_bb->succs = bb->succs;
576 bb->succs = NULL;
577 FOR_EACH_EDGE (e, ei, new_bb->succs)
578 e->src = new_bb;
580 /* The new block starts off being dirty. */
581 df_set_bb_dirty (bb);
582 return new_bb;
585 /* Blocks A and B are to be merged into a single block A. The insns
586 are already contiguous. */
588 static void
589 rtl_merge_blocks (basic_block a, basic_block b)
591 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
592 rtx del_first = NULL_RTX, del_last = NULL_RTX;
593 rtx b_debug_start = b_end, b_debug_end = b_end;
594 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
595 int b_empty = 0;
597 if (dump_file)
598 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
599 a->index);
601 while (DEBUG_INSN_P (b_end))
602 b_end = PREV_INSN (b_debug_start = b_end);
604 /* If there was a CODE_LABEL beginning B, delete it. */
605 if (LABEL_P (b_head))
607 /* Detect basic blocks with nothing but a label. This can happen
608 in particular at the end of a function. */
609 if (b_head == b_end)
610 b_empty = 1;
612 del_first = del_last = b_head;
613 b_head = NEXT_INSN (b_head);
616 /* Delete the basic block note and handle blocks containing just that
617 note. */
618 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
620 if (b_head == b_end)
621 b_empty = 1;
622 if (! del_last)
623 del_first = b_head;
625 del_last = b_head;
626 b_head = NEXT_INSN (b_head);
629 /* If there was a jump out of A, delete it. */
630 if (JUMP_P (a_end))
632 rtx prev;
634 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
635 if (!NOTE_P (prev)
636 || NOTE_INSN_BASIC_BLOCK_P (prev)
637 || prev == BB_HEAD (a))
638 break;
640 del_first = a_end;
642 #ifdef HAVE_cc0
643 /* If this was a conditional jump, we need to also delete
644 the insn that set cc0. */
645 if (only_sets_cc0_p (prev))
647 rtx tmp = prev;
649 prev = prev_nonnote_insn (prev);
650 if (!prev)
651 prev = BB_HEAD (a);
652 del_first = tmp;
654 #endif
656 a_end = PREV_INSN (del_first);
658 else if (BARRIER_P (NEXT_INSN (a_end)))
659 del_first = NEXT_INSN (a_end);
661 /* Delete everything marked above as well as crap that might be
662 hanging out between the two blocks. */
663 BB_HEAD (b) = NULL;
664 delete_insn_chain (del_first, del_last, true);
666 /* Reassociate the insns of B with A. */
667 if (!b_empty)
669 update_bb_for_insn_chain (a_end, b_debug_end, a);
671 a_end = b_debug_end;
673 else if (b_end != b_debug_end)
675 /* Move any deleted labels and other notes between the end of A
676 and the debug insns that make up B after the debug insns,
677 bringing the debug insns into A while keeping the notes after
678 the end of A. */
679 if (NEXT_INSN (a_end) != b_debug_start)
680 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
681 b_debug_end);
682 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
683 a_end = b_debug_end;
686 df_bb_delete (b->index);
687 BB_END (a) = a_end;
689 /* If B was a forwarder block, propagate the locus on the edge. */
690 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
691 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
693 if (dump_file)
694 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
698 /* Return true when block A and B can be merged. */
700 static bool
701 rtl_can_merge_blocks (basic_block a, basic_block b)
703 /* If we are partitioning hot/cold basic blocks, we don't want to
704 mess up unconditional or indirect jumps that cross between hot
705 and cold sections.
707 Basic block partitioning may result in some jumps that appear to
708 be optimizable (or blocks that appear to be mergeable), but which really
709 must be left untouched (they are required to make it safely across
710 partition boundaries). See the comments at the top of
711 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
713 if (BB_PARTITION (a) != BB_PARTITION (b))
714 return false;
716 /* There must be exactly one edge in between the blocks. */
717 return (single_succ_p (a)
718 && single_succ (a) == b
719 && single_pred_p (b)
720 && a != b
721 /* Must be simple edge. */
722 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
723 && a->next_bb == b
724 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
725 /* If the jump insn has side effects,
726 we can't kill the edge. */
727 && (!JUMP_P (BB_END (a))
728 || (reload_completed
729 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
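/* Illustrative sketch, for exposition only: the predicate above is normally
   reached through the cfghooks wrappers.  A cleanup loop that merges each
   block with its single successor when the conditions hold might look like
   this.  The function name is hypothetical; can_merge_blocks_p and
   merge_blocks are the real cfghooks entry points.  */
#if 0
static void
example_merge_simple_chains (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      basic_block succ;

      if (!single_succ_p (bb))
	continue;
      succ = single_succ (bb);

      /* can_merge_blocks_p dispatches to rtl_can_merge_blocks (above) or to
	 its cfglayout counterpart, depending on the active hooks.  */
      if (succ != EXIT_BLOCK_PTR && can_merge_blocks_p (bb, succ))
	merge_blocks (bb, succ);
    }
}
#endif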
732 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
733 exist. */
735 rtx
736 block_label (basic_block block)
738 if (block == EXIT_BLOCK_PTR)
739 return NULL_RTX;
741 if (!LABEL_P (BB_HEAD (block)))
743 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
746 return BB_HEAD (block);
749 /* Attempt to perform edge redirection by replacing possibly complex jump
750 instruction by unconditional jump or removing jump completely. This can
751 apply only if all edges now point to the same block. The parameters and
752 return values are equivalent to redirect_edge_and_branch. */
754 edge
755 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
757 basic_block src = e->src;
758 rtx insn = BB_END (src), kill_from;
759 rtx set;
760 int fallthru = 0;
762 /* If we are partitioning hot/cold basic blocks, we don't want to
763 mess up unconditional or indirect jumps that cross between hot
764 and cold sections.
766 Basic block partitioning may result in some jumps that appear to
767 be optimizable (or blocks that appear to be mergeable), but which really
768 must be left untouched (they are required to make it safely across
769 partition boundaries). See the comments at the top of
770 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
772 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
773 || BB_PARTITION (src) != BB_PARTITION (target))
774 return NULL;
776 /* We can replace or remove a complex jump only when we have exactly
777 two edges. Also, if we have exactly one outgoing edge, we can
778 redirect that. */
779 if (EDGE_COUNT (src->succs) >= 3
780 /* Verify that all targets will be TARGET. Specifically, the
781 edge that is not E must also go to TARGET. */
782 || (EDGE_COUNT (src->succs) == 2
783 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
784 return NULL;
786 if (!onlyjump_p (insn))
787 return NULL;
788 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
789 return NULL;
791 /* Avoid removing branch with side effects. */
792 set = single_set (insn);
793 if (!set || side_effects_p (set))
794 return NULL;
796 /* In case we zap a conditional jump, we'll need to kill
797 the cc0 setter too. */
798 kill_from = insn;
799 #ifdef HAVE_cc0
800 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
801 && only_sets_cc0_p (PREV_INSN (insn)))
802 kill_from = PREV_INSN (insn);
803 #endif
805 /* See if we can create the fallthru edge. */
806 if (in_cfglayout || can_fallthru (src, target))
808 if (dump_file)
809 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
810 fallthru = 1;
812 /* Selectively unlink whole insn chain. */
813 if (in_cfglayout)
815 rtx insn = src->il.rtl->footer;
817 delete_insn_chain (kill_from, BB_END (src), false);
819 /* Remove barriers but keep jumptables. */
820 while (insn)
822 if (BARRIER_P (insn))
824 if (PREV_INSN (insn))
825 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
826 else
827 src->il.rtl->footer = NEXT_INSN (insn);
828 if (NEXT_INSN (insn))
829 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
831 if (LABEL_P (insn))
832 break;
833 insn = NEXT_INSN (insn);
836 else
837 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
838 false);
841 /* If this already is simplejump, redirect it. */
842 else if (simplejump_p (insn))
844 if (e->dest == target)
845 return NULL;
846 if (dump_file)
847 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
848 INSN_UID (insn), e->dest->index, target->index);
849 if (!redirect_jump (insn, block_label (target), 0))
851 gcc_assert (target == EXIT_BLOCK_PTR);
852 return NULL;
856 /* Cannot do anything for target exit block. */
857 else if (target == EXIT_BLOCK_PTR)
858 return NULL;
860 /* Or replace possibly complicated jump insn by simple jump insn. */
861 else
863 rtx target_label = block_label (target);
864 rtx barrier, label, table;
866 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
867 JUMP_LABEL (BB_END (src)) = target_label;
868 LABEL_NUSES (target_label)++;
869 if (dump_file)
870 fprintf (dump_file, "Replacing insn %i by jump %i\n",
871 INSN_UID (insn), INSN_UID (BB_END (src)));
874 delete_insn_chain (kill_from, insn, false);
876 /* Recognize a tablejump that we are converting to a
877 simple jump and remove its associated CODE_LABEL
878 and ADDR_VEC or ADDR_DIFF_VEC. */
879 if (tablejump_p (insn, &label, &table))
880 delete_insn_chain (label, table, false);
882 barrier = next_nonnote_insn (BB_END (src));
883 if (!barrier || !BARRIER_P (barrier))
884 emit_barrier_after (BB_END (src));
885 else
887 if (barrier != NEXT_INSN (BB_END (src)))
889 /* Move the jump before the barrier so that the notes which originally
890 appeared before the jump table, or were created before it, end up
891 inside the basic block. */
892 rtx new_insn = BB_END (src);
894 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
895 PREV_INSN (barrier), src);
897 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
898 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
900 NEXT_INSN (new_insn) = barrier;
901 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
903 PREV_INSN (new_insn) = PREV_INSN (barrier);
904 PREV_INSN (barrier) = new_insn;
909 /* Keep only one edge out and set proper flags. */
910 if (!single_succ_p (src))
911 remove_edge (e);
912 gcc_assert (single_succ_p (src));
914 e = single_succ_edge (src);
915 if (fallthru)
916 e->flags = EDGE_FALLTHRU;
917 else
918 e->flags = 0;
920 e->probability = REG_BR_PROB_BASE;
921 e->count = src->count;
923 if (e->dest != target)
924 redirect_edge_succ (e, target);
925 return e;
928 /* Subroutine of redirect_branch_edge that tries to patch the jump
929 instruction INSN so that it reaches block NEW_BB. Do this
930 only when it originally reached the label OLD_LABEL. Return true if this
931 worked or the original target wasn't OLD_LABEL; return false if redirection
932 doesn't work. */
934 static bool
935 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
937 rtx tmp;
938 /* Recognize a tablejump and adjust all matching cases. */
939 if (tablejump_p (insn, NULL, &tmp))
941 rtvec vec;
942 int j;
943 rtx new_label = block_label (new_bb);
945 if (new_bb == EXIT_BLOCK_PTR)
946 return false;
947 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
948 vec = XVEC (PATTERN (tmp), 0);
949 else
950 vec = XVEC (PATTERN (tmp), 1);
952 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
953 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
955 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
956 --LABEL_NUSES (old_label);
957 ++LABEL_NUSES (new_label);
960 /* Handle casesi dispatch insns. */
961 if ((tmp = single_set (insn)) != NULL
962 && SET_DEST (tmp) == pc_rtx
963 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
964 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
965 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
967 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
968 new_label);
969 --LABEL_NUSES (old_label);
970 ++LABEL_NUSES (new_label);
973 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
975 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
976 rtx new_label, note;
978 if (new_bb == EXIT_BLOCK_PTR)
979 return false;
980 new_label = block_label (new_bb);
982 for (i = 0; i < n; ++i)
984 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
985 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
986 if (XEXP (old_ref, 0) == old_label)
988 ASM_OPERANDS_LABEL (tmp, i)
989 = gen_rtx_LABEL_REF (Pmode, new_label);
990 --LABEL_NUSES (old_label);
991 ++LABEL_NUSES (new_label);
995 if (JUMP_LABEL (insn) == old_label)
997 JUMP_LABEL (insn) = new_label;
998 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
999 if (note)
1000 remove_note (insn, note);
1002 else
1004 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1005 if (note)
1006 remove_note (insn, note);
1007 if (JUMP_LABEL (insn) != new_label
1008 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1009 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1011 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1012 != NULL_RTX)
1013 XEXP (note, 0) = new_label;
1015 else
1017 /* ?? We may play the games with moving the named labels from
1018 one basic block to the other in case only one computed_jump is
1019 available. */
1020 if (computed_jump_p (insn)
1021 /* A return instruction can't be redirected. */
1022 || returnjump_p (insn))
1023 return false;
1025 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1027 /* If the insn doesn't go where we think, we're confused. */
1028 gcc_assert (JUMP_LABEL (insn) == old_label);
1030 /* If the substitution doesn't succeed, die. This can happen
1031 if the back end emitted unrecognizable instructions or if
1032 target is exit block on some arches. */
1033 if (!redirect_jump (insn, block_label (new_bb), 0))
1035 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1036 return false;
1040 return true;
1044 /* Redirect an edge representing the branch of an (un)conditional jump or
1045 tablejump. Return NULL on failure. */
1046 static edge
1047 redirect_branch_edge (edge e, basic_block target)
1049 rtx old_label = BB_HEAD (e->dest);
1050 basic_block src = e->src;
1051 rtx insn = BB_END (src);
1053 /* We can only redirect non-fallthru edges of jump insn. */
1054 if (e->flags & EDGE_FALLTHRU)
1055 return NULL;
1056 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1057 return NULL;
1059 if (!currently_expanding_to_rtl)
1061 if (!patch_jump_insn (insn, old_label, target))
1062 return NULL;
1064 else
1065 /* When expanding this BB might actually contain multiple
1066 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1067 Redirect all of those that match our label. */
1068 FOR_BB_INSNS (src, insn)
1069 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1070 return NULL;
1072 if (dump_file)
1073 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1074 e->src->index, e->dest->index, target->index);
1076 if (e->dest != target)
1077 e = redirect_edge_succ_nodup (e, target);
1079 return e;
1082 /* Attempt to change code to redirect edge E to TARGET. Don't do that at the
1083 expense of adding new instructions or reordering basic blocks.
1085 The function can also be called with the edge destination equivalent to TARGET.
1086 Then it should try the simplifications and do nothing if none is possible.
1088 Return the edge representing the branch if the transformation succeeded. Return NULL
1089 on failure.
1090 We still return NULL in case E already pointed to TARGET and we didn't
1091 manage to simplify the instruction stream. */
1093 static edge
1094 rtl_redirect_edge_and_branch (edge e, basic_block target)
1096 edge ret;
1097 basic_block src = e->src;
1099 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1100 return NULL;
1102 if (e->dest == target)
1103 return e;
1105 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1107 df_set_bb_dirty (src);
1108 return ret;
1111 ret = redirect_branch_edge (e, target);
1112 if (!ret)
1113 return NULL;
1115 df_set_bb_dirty (src);
1116 return ret;
1119 /* Like force_nonfallthru below, but additionally performs redirection.
1120 Used by redirect_edge_and_branch_force. */
1122 basic_block
1123 force_nonfallthru_and_redirect (edge e, basic_block target)
1125 basic_block jump_block, new_bb = NULL, src = e->src;
1126 rtx note;
1127 edge new_edge;
1128 int abnormal_edge_flags = 0;
1129 int loc;
1131 /* In case the last instruction is a conditional jump to the next
1132 instruction, first redirect the jump itself and then continue
1133 by creating a basic block afterwards to redirect the fallthru edge. */
1134 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1135 && any_condjump_p (BB_END (e->src))
1136 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1138 rtx note;
1139 edge b = unchecked_make_edge (e->src, target, 0);
1140 bool redirected;
1142 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1143 gcc_assert (redirected);
1145 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1146 if (note)
1148 int prob = INTVAL (XEXP (note, 0));
1150 b->probability = prob;
1151 b->count = e->count * prob / REG_BR_PROB_BASE;
1152 e->probability -= e->probability;
1153 e->count -= b->count;
1154 if (e->probability < 0)
1155 e->probability = 0;
1156 if (e->count < 0)
1157 e->count = 0;
1161 if (e->flags & EDGE_ABNORMAL)
1163 /* Irritating special case - fallthru edge to the same block as an abnormal
1164 edge.
1165 We can't redirect the abnormal edge, but we can still split the fallthru
1166 one and create a separate abnormal edge to the original destination.
1167 This allows bb-reorder to make such an edge non-fallthru. */
1168 gcc_assert (e->dest == target);
1169 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1170 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1172 else
1174 gcc_assert (e->flags & EDGE_FALLTHRU);
1175 if (e->src == ENTRY_BLOCK_PTR)
1177 /* We can't redirect the entry block. Create an empty block
1178 at the start of the function which we use to add the new
1179 jump. */
1180 edge tmp;
1181 edge_iterator ei;
1182 bool found = false;
1184 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1186 /* Change the existing edge's source to be the new block, and add
1187 a new edge from the entry block to the new block. */
1188 e->src = bb;
1189 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1191 if (tmp == e)
1193 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1194 found = true;
1195 break;
1197 else
1198 ei_next (&ei);
1201 gcc_assert (found);
1203 VEC_safe_push (edge, gc, bb->succs, e);
1204 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1208 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1210 /* Create the new structures. */
1212 /* If the old block ended with a tablejump, skip its table
1213 by searching forward from there. Otherwise start searching
1214 forward from the last instruction of the old block. */
1215 if (!tablejump_p (BB_END (e->src), NULL, &note))
1216 note = BB_END (e->src);
1217 note = NEXT_INSN (note);
1219 jump_block = create_basic_block (note, NULL, e->src);
1220 jump_block->count = e->count;
1221 jump_block->frequency = EDGE_FREQUENCY (e);
1222 jump_block->loop_depth = target->loop_depth;
1224 /* Make sure new block ends up in correct hot/cold section. */
1226 BB_COPY_PARTITION (jump_block, e->src);
1227 if (flag_reorder_blocks_and_partition
1228 && targetm_common.have_named_sections
1229 && JUMP_P (BB_END (jump_block))
1230 && !any_condjump_p (BB_END (jump_block))
1231 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1232 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1234 /* Wire edge in. */
1235 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1236 new_edge->probability = e->probability;
1237 new_edge->count = e->count;
1239 /* Redirect old edge. */
1240 redirect_edge_pred (e, jump_block);
1241 e->probability = REG_BR_PROB_BASE;
1243 new_bb = jump_block;
1245 else
1246 jump_block = e->src;
1248 if (e->goto_locus && e->goto_block == NULL)
1249 loc = e->goto_locus;
1250 else
1251 loc = 0;
1252 e->flags &= ~EDGE_FALLTHRU;
1253 if (target == EXIT_BLOCK_PTR)
1255 #ifdef HAVE_return
1256 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1257 JUMP_LABEL (BB_END (jump_block)) = ret_rtx;
1258 #else
1259 gcc_unreachable ();
1260 #endif
1262 else
1264 rtx label = block_label (target);
1265 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1266 JUMP_LABEL (BB_END (jump_block)) = label;
1267 LABEL_NUSES (label)++;
1270 emit_barrier_after (BB_END (jump_block));
1271 redirect_edge_succ_nodup (e, target);
1273 if (abnormal_edge_flags)
1274 make_edge (src, target, abnormal_edge_flags);
1276 df_mark_solutions_dirty ();
1277 return new_bb;
1280 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1281 (and possibly create new basic block) to make edge non-fallthru.
1282 Return newly created BB or NULL if none. */
1284 static basic_block
1285 rtl_force_nonfallthru (edge e)
1287 return force_nonfallthru_and_redirect (e, e->dest);
1290 /* Redirect edge even at the expense of creating new jump insn or
1291 basic block. Return new basic block if created, NULL otherwise.
1292 Conversion must be possible. */
1294 static basic_block
1295 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1297 if (redirect_edge_and_branch (e, target)
1298 || e->dest == target)
1299 return NULL;
1301 /* In case the edge redirection failed, try to force it to be non-fallthru
1302 and redirect newly created simplejump. */
1303 df_set_bb_dirty (e->src);
1304 return force_nonfallthru_and_redirect (e, target);
1307 /* The given edge should potentially be a fallthru edge. If that is in
1308 fact true, delete the jump and barriers that are in the way. */
1310 static void
1311 rtl_tidy_fallthru_edge (edge e)
1313 rtx q;
1314 basic_block b = e->src, c = b->next_bb;
1316 /* ??? In a late-running flow pass, other folks may have deleted basic
1317 blocks by nopping out blocks, leaving multiple BARRIERs between here
1318 and the target label. They ought to be chastised and fixed.
1320 We can also wind up with a sequence of undeletable labels between
1321 one block and the next.
1323 So search through a sequence of barriers, labels, and notes for
1324 the head of block C and assert that we really do fall through. */
1326 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1327 if (INSN_P (q))
1328 return;
1330 /* Remove what will soon cease being the jump insn from the source block.
1331 If block B consisted only of this single jump, turn it into a deleted
1332 note. */
1333 q = BB_END (b);
1334 if (JUMP_P (q)
1335 && onlyjump_p (q)
1336 && (any_uncondjump_p (q)
1337 || single_succ_p (b)))
1339 #ifdef HAVE_cc0
1340 /* If this was a conditional jump, we need to also delete
1341 the insn that set cc0. */
1342 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1343 q = PREV_INSN (q);
1344 #endif
1346 q = PREV_INSN (q);
1349 /* Selectively unlink the sequence. */
1350 if (q != PREV_INSN (BB_HEAD (c)))
1351 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1353 e->flags |= EDGE_FALLTHRU;
1356 /* Should move basic block BB after basic block AFTER. NIY. */
1358 static bool
1359 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1360 basic_block after ATTRIBUTE_UNUSED)
1362 return false;
1365 /* Split a (typically critical) edge. Return the new block.
1366 The edge must not be abnormal.
1368 ??? The code generally expects to be called on critical edges.
1369 The case of a block ending in an unconditional jump to a
1370 block with multiple predecessors is not handled optimally. */
1372 static basic_block
1373 rtl_split_edge (edge edge_in)
1375 basic_block bb;
1376 rtx before;
1378 /* Abnormal edges cannot be split. */
1379 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1381 /* We are going to place the new block in front of edge destination.
1382 Avoid existence of fallthru predecessors. */
1383 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1385 edge e = find_fallthru_edge (edge_in->dest->preds);
1387 if (e)
1388 force_nonfallthru (e);
1391 /* Create the basic block note. */
1392 if (edge_in->dest != EXIT_BLOCK_PTR)
1393 before = BB_HEAD (edge_in->dest);
1394 else
1395 before = NULL_RTX;
1397 /* If this is a fall-through edge to the exit block, the blocks might not
1398 be adjacent, and the right place is after the source. */
1399 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1401 before = NEXT_INSN (BB_END (edge_in->src));
1402 bb = create_basic_block (before, NULL, edge_in->src);
1403 BB_COPY_PARTITION (bb, edge_in->src);
1405 else
1407 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1408 /* ??? Why not edge_in->dest->prev_bb here? */
1409 BB_COPY_PARTITION (bb, edge_in->dest);
1412 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1414 /* For non-fallthru edges, we must adjust the predecessor's
1415 jump instruction to target our new block. */
1416 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1418 edge redirected = redirect_edge_and_branch (edge_in, bb);
1419 gcc_assert (redirected);
1421 else
1423 if (edge_in->src != ENTRY_BLOCK_PTR)
1425 /* For asm goto even splitting of fallthru edge might
1426 need insn patching, as other labels might point to the
1427 old label. */
1428 rtx last = BB_END (edge_in->src);
1429 if (last
1430 && JUMP_P (last)
1431 && edge_in->dest != EXIT_BLOCK_PTR
1432 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1433 && patch_jump_insn (last, before, bb))
1434 df_set_bb_dirty (edge_in->src);
1436 redirect_edge_succ (edge_in, bb);
1439 return bb;
1442 /* Queue instructions for insertion on an edge between two basic blocks.
1443 The new instructions and basic blocks (if any) will not appear in the
1444 CFG until commit_edge_insertions is called. */
1446 void
1447 insert_insn_on_edge (rtx pattern, edge e)
1449 /* We cannot insert instructions on an abnormal critical edge.
1450 It will be easier to find the culprit if we die now. */
1451 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1453 if (e->insns.r == NULL_RTX)
1454 start_sequence ();
1455 else
1456 push_to_sequence (e->insns.r);
1458 emit_insn (pattern);
1460 e->insns.r = get_insns ();
1461 end_sequence ();
1464 /* Update the CFG for the instructions queued on edge E. */
1466 void
1467 commit_one_edge_insertion (edge e)
1469 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1470 basic_block bb;
1472 /* Pull the insns off the edge now since the edge might go away. */
1473 insns = e->insns.r;
1474 e->insns.r = NULL_RTX;
1476 /* Figure out where to put these insns. If the destination has
1477 one predecessor, insert there. Except for the exit block. */
1478 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1480 bb = e->dest;
1482 /* Get the location correct wrt a code label, and "nice" wrt
1483 a basic block note, and before everything else. */
1484 tmp = BB_HEAD (bb);
1485 if (LABEL_P (tmp))
1486 tmp = NEXT_INSN (tmp);
1487 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1488 tmp = NEXT_INSN (tmp);
1489 if (tmp == BB_HEAD (bb))
1490 before = tmp;
1491 else if (tmp)
1492 after = PREV_INSN (tmp);
1493 else
1494 after = get_last_insn ();
1497 /* If the source has one successor and the edge is not abnormal,
1498 insert there. Except for the entry block. */
1499 else if ((e->flags & EDGE_ABNORMAL) == 0
1500 && single_succ_p (e->src)
1501 && e->src != ENTRY_BLOCK_PTR)
1503 bb = e->src;
1505 /* It is possible to have a non-simple jump here. Consider a target
1506 where some forms of unconditional jumps clobber a register. This
1507 happens on the fr30 for example.
1509 We know this block has a single successor, so we can just emit
1510 the queued insns before the jump. */
1511 if (JUMP_P (BB_END (bb)))
1512 before = BB_END (bb);
1513 else
1515 /* We'd better be fallthru, or we've lost track of what's what. */
1516 gcc_assert (e->flags & EDGE_FALLTHRU);
1518 after = BB_END (bb);
1522 /* Otherwise we must split the edge. */
1523 else
1525 bb = split_edge (e);
1526 after = BB_END (bb);
1528 if (flag_reorder_blocks_and_partition
1529 && targetm_common.have_named_sections
1530 && e->src != ENTRY_BLOCK_PTR
1531 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1532 && !(e->flags & EDGE_CROSSING)
1533 && JUMP_P (after)
1534 && !any_condjump_p (after)
1535 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1536 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1539 /* Now that we've found the spot, do the insertion. */
1540 if (before)
1542 emit_insn_before_noloc (insns, before, bb);
1543 last = prev_nonnote_insn (before);
1545 else
1546 last = emit_insn_after_noloc (insns, after, bb);
1548 if (returnjump_p (last))
1550 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1551 This is not currently a problem because this only happens
1552 for the (single) epilogue, which already has a fallthru edge
1553 to EXIT. */
1555 e = single_succ_edge (bb);
1556 gcc_assert (e->dest == EXIT_BLOCK_PTR
1557 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1559 e->flags &= ~EDGE_FALLTHRU;
1560 emit_barrier_after (last);
1562 if (before)
1563 delete_insn (before);
1565 else
1566 gcc_assert (!JUMP_P (last));
1569 /* Update the CFG for all queued instructions. */
1571 void
1572 commit_edge_insertions (void)
1574 basic_block bb;
1576 #ifdef ENABLE_CHECKING
1577 verify_flow_info ();
1578 #endif
1580 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1582 edge e;
1583 edge_iterator ei;
1585 FOR_EACH_EDGE (e, ei, bb->succs)
1586 if (e->insns.r)
1587 commit_one_edge_insertion (e);
1592 /* Print out RTL-specific basic block information (live information
1593 at start and end). */
1595 static void
1596 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1598 rtx insn;
1599 rtx last;
1600 char *s_indent;
1602 s_indent = (char *) alloca ((size_t) indent + 1);
1603 memset (s_indent, ' ', (size_t) indent);
1604 s_indent[indent] = '\0';
1606 if (df)
1608 df_dump_top (bb, outf);
1609 putc ('\n', outf);
1612 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1613 insn = NEXT_INSN (insn))
1614 print_rtl_single (outf, insn);
1616 if (df)
1618 df_dump_bottom (bb, outf);
1619 putc ('\n', outf);
1624 /* Like print_rtl, but also print out live information for the start of each
1625 basic block. */
1627 void
1628 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1630 const_rtx tmp_rtx;
1631 if (rtx_first == 0)
1632 fprintf (outf, "(nil)\n");
1633 else
1635 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1636 int max_uid = get_max_uid ();
1637 basic_block *start = XCNEWVEC (basic_block, max_uid);
1638 basic_block *end = XCNEWVEC (basic_block, max_uid);
1639 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1641 basic_block bb;
1643 if (df)
1644 df_dump_start (outf);
1646 FOR_EACH_BB_REVERSE (bb)
1648 rtx x;
1650 start[INSN_UID (BB_HEAD (bb))] = bb;
1651 end[INSN_UID (BB_END (bb))] = bb;
1652 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1654 enum bb_state state = IN_MULTIPLE_BB;
1656 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1657 state = IN_ONE_BB;
1658 in_bb_p[INSN_UID (x)] = state;
1660 if (x == BB_END (bb))
1661 break;
1665 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1667 int did_output;
1669 bb = start[INSN_UID (tmp_rtx)];
1670 if (bb != NULL)
1671 dump_bb_info (bb, true, false, dump_flags, ";; ", outf);
1673 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1674 && !NOTE_P (tmp_rtx)
1675 && !BARRIER_P (tmp_rtx))
1676 fprintf (outf, ";; Insn is not within a basic block\n");
1677 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1678 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1680 did_output = print_rtl_single (outf, tmp_rtx);
1682 bb = end[INSN_UID (tmp_rtx)];
1683 if (bb != NULL)
1684 dump_bb_info (bb, false, true, dump_flags, ";; ", outf);
1685 if (did_output)
1686 putc ('\n', outf);
1689 free (start);
1690 free (end);
1691 free (in_bb_p);
1694 if (crtl->epilogue_delay_list != 0)
1696 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1697 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1698 tmp_rtx = XEXP (tmp_rtx, 1))
1699 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1703 void
1704 update_br_prob_note (basic_block bb)
1706 rtx note;
1707 if (!JUMP_P (BB_END (bb)))
1708 return;
1709 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1710 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1711 return;
1712 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1715 /* Get the last insn associated with block BB (that includes barriers and
1716 tablejumps after BB). */
1717 rtx
1718 get_last_bb_insn (basic_block bb)
1720 rtx tmp;
1721 rtx end = BB_END (bb);
1723 /* Include any jump table following the basic block. */
1724 if (tablejump_p (end, NULL, &tmp))
1725 end = tmp;
1727 /* Include any barriers that may follow the basic block. */
1728 tmp = next_nonnote_insn_bb (end);
1729 while (tmp && BARRIER_P (tmp))
1731 end = tmp;
1732 tmp = next_nonnote_insn_bb (end);
1735 return end;
1738 /* Verify the CFG and RTL consistency common for both underlying RTL and
1739 cfglayout RTL.
1741 Currently it does the following checks:
1743 - overlapping of basic blocks
1744 - insns with wrong BLOCK_FOR_INSN pointers
1745 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1746 - tails of basic blocks (ensure that boundary is necessary)
1747 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1748 and NOTE_INSN_BASIC_BLOCK
1749 - verify that no fall_thru edge crosses hot/cold partition boundaries
1750 - verify that there are no pending RTL branch predictions
1752 In the future it can be extended to check a lot of other stuff as well
1753 (reachability of basic blocks, life information, etc. etc.). */
1755 static int
1756 rtl_verify_flow_info_1 (void)
1758 rtx x;
1759 int err = 0;
1760 basic_block bb;
1762 /* Check the general integrity of the basic blocks. */
1763 FOR_EACH_BB_REVERSE (bb)
1765 rtx insn;
1767 if (!(bb->flags & BB_RTL))
1769 error ("BB_RTL flag not set for block %d", bb->index);
1770 err = 1;
1773 FOR_BB_INSNS (bb, insn)
1774 if (BLOCK_FOR_INSN (insn) != bb)
1776 error ("insn %d basic block pointer is %d, should be %d",
1777 INSN_UID (insn),
1778 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1779 bb->index);
1780 err = 1;
1783 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1784 if (!BARRIER_P (insn)
1785 && BLOCK_FOR_INSN (insn) != NULL)
1787 error ("insn %d in header of bb %d has non-NULL basic block",
1788 INSN_UID (insn), bb->index);
1789 err = 1;
1791 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1792 if (!BARRIER_P (insn)
1793 && BLOCK_FOR_INSN (insn) != NULL)
1795 error ("insn %d in footer of bb %d has non-NULL basic block",
1796 INSN_UID (insn), bb->index);
1797 err = 1;
1801 /* Now check the basic blocks (boundaries etc.) */
1802 FOR_EACH_BB_REVERSE (bb)
1804 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1805 edge e, fallthru = NULL;
1806 rtx note;
1807 edge_iterator ei;
1809 if (JUMP_P (BB_END (bb))
1810 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1811 && EDGE_COUNT (bb->succs) >= 2
1812 && any_condjump_p (BB_END (bb)))
1814 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1815 && profile_status != PROFILE_ABSENT)
1817 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1818 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1819 err = 1;
1822 FOR_EACH_EDGE (e, ei, bb->succs)
1824 bool is_crossing;
1826 if (e->flags & EDGE_FALLTHRU)
1827 n_fallthru++, fallthru = e;
1829 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1830 && e->src != ENTRY_BLOCK_PTR
1831 && e->dest != EXIT_BLOCK_PTR);
1832 if (e->flags & EDGE_CROSSING)
1834 if (!is_crossing)
1836 error ("EDGE_CROSSING incorrectly set across same section");
1837 err = 1;
1839 if (e->flags & EDGE_FALLTHRU)
1841 error ("fallthru edge crosses section boundary (bb %i)",
1842 e->src->index);
1843 err = 1;
1845 if (e->flags & EDGE_EH)
1847 error ("EH edge crosses section boundary (bb %i)",
1848 e->src->index);
1849 err = 1;
1852 else if (is_crossing)
1854 error ("EDGE_CROSSING missing across section boundary");
1855 err = 1;
1858 if ((e->flags & ~(EDGE_DFS_BACK
1859 | EDGE_CAN_FALLTHRU
1860 | EDGE_IRREDUCIBLE_LOOP
1861 | EDGE_LOOP_EXIT
1862 | EDGE_CROSSING)) == 0)
1863 n_branch++;
1865 if (e->flags & EDGE_ABNORMAL_CALL)
1866 n_call++;
1868 if (e->flags & EDGE_EH)
1869 n_eh++;
1870 else if (e->flags & EDGE_ABNORMAL)
1871 n_abnormal++;
1874 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1876 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1877 err = 1;
1879 if (n_eh > 1)
1881 error ("too many eh edges %i", bb->index);
1882 err = 1;
1884 if (n_branch
1885 && (!JUMP_P (BB_END (bb))
1886 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1887 || any_condjump_p (BB_END (bb))))))
1889 error ("too many outgoing branch edges from bb %i", bb->index);
1890 err = 1;
1892 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1894 error ("fallthru edge after unconditional jump %i", bb->index);
1895 err = 1;
1897 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1899 error ("wrong number of branch edges after unconditional jump %i",
1900 bb->index);
1901 err = 1;
1903 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1904 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1906 error ("wrong amount of branch edges after conditional jump %i",
1907 bb->index);
1908 err = 1;
1910 if (n_call && !CALL_P (BB_END (bb)))
1912 error ("call edges for non-call insn in bb %i", bb->index);
1913 err = 1;
1915 if (n_abnormal
1916 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1917 && (!JUMP_P (BB_END (bb))
1918 || any_condjump_p (BB_END (bb))
1919 || any_uncondjump_p (BB_END (bb))))
1921 error ("abnormal edges for no purpose in bb %i", bb->index);
1922 err = 1;
1925 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1926 /* We may have a barrier inside a basic block before dead code
1927 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1928 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1930 debug_rtx (x);
1931 if (! BLOCK_FOR_INSN (x))
1932 error
1933 ("insn %d inside basic block %d but block_for_insn is NULL",
1934 INSN_UID (x), bb->index);
1935 else
1936 error
1937 ("insn %d inside basic block %d but block_for_insn is %i",
1938 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1940 err = 1;
1943 /* OK pointers are correct. Now check the header of basic
1944 block. It ought to contain optional CODE_LABEL followed
1945 by NOTE_BASIC_BLOCK. */
1946 x = BB_HEAD (bb);
1947 if (LABEL_P (x))
1949 if (BB_END (bb) == x)
1951 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1952 bb->index);
1953 err = 1;
1956 x = NEXT_INSN (x);
1959 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1961 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1962 bb->index);
1963 err = 1;
1966 if (BB_END (bb) == x)
1967 /* Do checks for empty blocks here. */
1968 ;
1969 else
1970 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1972 if (NOTE_INSN_BASIC_BLOCK_P (x))
1974 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1975 INSN_UID (x), bb->index);
1976 err = 1;
1979 if (x == BB_END (bb))
1980 break;
1982 if (control_flow_insn_p (x))
1984 error ("in basic block %d:", bb->index);
1985 fatal_insn ("flow control insn inside a basic block", x);
1990 /* Clean up. */
1991 return err;
1994 /* Verify the CFG and RTL consistency common for both underlying RTL and
1995 cfglayout RTL.
1997 Currently it does the following checks:
1998 - all checks of rtl_verify_flow_info_1
1999 - test head/end pointers
2000 - check that all insns are in the basic blocks
2001 (except the switch handling code, barriers and notes)
2002 - check that all returns are followed by barriers
2003 - check that all fallthru edges point to adjacent blocks. */
2005 static int
2006 rtl_verify_flow_info (void)
2008 basic_block bb;
2009 int err = rtl_verify_flow_info_1 ();
2010 rtx x;
2011 rtx last_head = get_last_insn ();
2012 basic_block *bb_info;
2013 int num_bb_notes;
2014 const rtx rtx_first = get_insns ();
2015 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2016 const int max_uid = get_max_uid ();
2018 bb_info = XCNEWVEC (basic_block, max_uid);
2020 FOR_EACH_BB_REVERSE (bb)
2022 edge e;
2023 rtx head = BB_HEAD (bb);
2024 rtx end = BB_END (bb);
2026 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2028 /* Verify the end of the basic block is in the INSN chain. */
2029 if (x == end)
2030 break;
2032 /* And that the code outside of basic blocks has NULL bb field. */
2033 if (!BARRIER_P (x)
2034 && BLOCK_FOR_INSN (x) != NULL)
2036 error ("insn %d outside of basic blocks has non-NULL bb field",
2037 INSN_UID (x));
2038 err = 1;
2042 if (!x)
2044 error ("end insn %d for block %d not found in the insn stream",
2045 INSN_UID (end), bb->index);
2046 err = 1;
2049 /* Work backwards from the end to the head of the basic block
2050 to verify the head is in the RTL chain. */
2051 for (; x != NULL_RTX; x = PREV_INSN (x))
2053 /* While walking over the insn chain, verify insns appear
2054 in only one basic block. */
2055 if (bb_info[INSN_UID (x)] != NULL)
2057 error ("insn %d is in multiple basic blocks (%d and %d)",
2058 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2059 err = 1;
2062 bb_info[INSN_UID (x)] = bb;
2064 if (x == head)
2065 break;
2067 if (!x)
2069 error ("head insn %d for block %d not found in the insn stream",
2070 INSN_UID (head), bb->index);
2071 err = 1;
2074 last_head = PREV_INSN (x);
2076 e = find_fallthru_edge (bb->succs);
2077 if (!e)
2079 rtx insn;
2081 /* Ensure existence of barrier in BB with no fallthru edges. */
2082 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2084 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2086 error ("missing barrier after block %i", bb->index);
2087 err = 1;
2088 break;
2090 if (BARRIER_P (insn))
2091 break;
2094 else if (e->src != ENTRY_BLOCK_PTR
2095 && e->dest != EXIT_BLOCK_PTR)
2097 rtx insn;
2099 if (e->src->next_bb != e->dest)
2101 error
2102 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2103 e->src->index, e->dest->index);
2104 err = 1;
2106 else
2107 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2108 insn = NEXT_INSN (insn))
2109 if (BARRIER_P (insn) || INSN_P (insn))
2111 error ("verify_flow_info: Incorrect fallthru %i->%i",
2112 e->src->index, e->dest->index);
2113 fatal_insn ("wrong insn in the fallthru edge", insn);
2114 err = 1;
2119 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2121 /* Check that the code before the first basic block has NULL
2122 bb field. */
2123 if (!BARRIER_P (x)
2124 && BLOCK_FOR_INSN (x) != NULL)
2126 error ("insn %d outside of basic blocks has non-NULL bb field",
2127 INSN_UID (x));
2128 err = 1;
2131 free (bb_info);
2133 num_bb_notes = 0;
2134 last_bb_seen = ENTRY_BLOCK_PTR;
2136 for (x = rtx_first; x; x = NEXT_INSN (x))
2138 if (NOTE_INSN_BASIC_BLOCK_P (x))
2140 bb = NOTE_BASIC_BLOCK (x);
2142 num_bb_notes++;
2143 if (bb != last_bb_seen->next_bb)
2144 internal_error ("basic blocks not laid down consecutively");
2146 curr_bb = last_bb_seen = bb;
2149 if (!curr_bb)
2151 switch (GET_CODE (x))
2153 case BARRIER:
2154 case NOTE:
2155 break;
2157 case CODE_LABEL:
2158 /* An addr_vec is placed outside any basic block. */
2159 if (NEXT_INSN (x)
2160 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2161 x = NEXT_INSN (x);
2163 /* But in any case, non-deletable labels can appear anywhere. */
2164 break;
2166 default:
2167 fatal_insn ("insn outside basic block", x);
2171 if (JUMP_P (x)
2172 && returnjump_p (x) && ! condjump_p (x)
2173 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2174 fatal_insn ("return not followed by barrier", x);
2175 if (curr_bb && x == BB_END (curr_bb))
2176 curr_bb = NULL;
2179 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2180 internal_error
2181 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2182 num_bb_notes, n_basic_blocks);
2184 return err;
2187 /* Assume that the preceding pass has possibly eliminated jump instructions
2188 or converted conditional jumps into unconditional ones.  Remove the edges
2189 from the CFG that are no longer valid.  Return true if any edges are eliminated.  */
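/* Editorial sketch (assumption): a pass that has simplified the control flow
   insn ending BB, e.g. folded a conditional jump into an unconditional one,
   is expected to drop the now-stale successor edges itself:

       if (purge_dead_edges (bb))
         df_analyze ();   -- hypothetical follow-up; real callers vary

   purge_all_dead_edges () further below simply applies this to every block.  */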
2191 bool
2192 purge_dead_edges (basic_block bb)
2194 edge e;
2195 rtx insn = BB_END (bb), note;
2196 bool purged = false;
2197 bool found;
2198 edge_iterator ei;
2200 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2201 do
2202 insn = PREV_INSN (insn);
2203 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2205 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2206 if (NONJUMP_INSN_P (insn)
2207 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2209 rtx eqnote;
2211 if (! may_trap_p (PATTERN (insn))
2212 || ((eqnote = find_reg_equal_equiv_note (insn))
2213 && ! may_trap_p (XEXP (eqnote, 0))))
2214 remove_note (insn, note);
2217 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2218 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2220 bool remove = false;
2222 /* There are three types of edges we need to handle correctly here: EH
2223 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2224 latter can appear when nonlocal gotos are used. */
2225 if (e->flags & EDGE_ABNORMAL_CALL)
2227 if (!CALL_P (insn))
2228 remove = true;
2229 else if (can_nonlocal_goto (insn))
2230 ;
2231 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2232 ;
2233 else
2234 remove = true;
2236 else if (e->flags & EDGE_EH)
2237 remove = !can_throw_internal (insn);
2239 if (remove)
2241 remove_edge (e);
2242 df_set_bb_dirty (bb);
2243 purged = true;
2245 else
2246 ei_next (&ei);
2249 if (JUMP_P (insn))
2251 rtx note;
2252 edge b,f;
2253 edge_iterator ei;
2255 /* We care only about conditional jumps and simplejumps.  */
2256 if (!any_condjump_p (insn)
2257 && !returnjump_p (insn)
2258 && !simplejump_p (insn))
2259 return purged;
2261 /* Branch probability/prediction notes are defined only for
2262 condjumps.  We may have turned a condjump into a simplejump.  */
2263 if (simplejump_p (insn))
2265 note = find_reg_note (insn, REG_BR_PROB, NULL);
2266 if (note)
2267 remove_note (insn, note);
2268 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2269 remove_note (insn, note);
2272 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2274 /* Do not let abnormal flags leak from computed jumps that were
2275 turned into simplejumps.  */
2277 e->flags &= ~EDGE_ABNORMAL;
2279 /* See if this edge is one we should keep. */
2280 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2281 /* A conditional jump can fall through into the next
2282 block, so we should keep the edge. */
2284 ei_next (&ei);
2285 continue;
2287 else if (e->dest != EXIT_BLOCK_PTR
2288 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2289 /* If the destination block is the target of the jump,
2290 keep the edge. */
2292 ei_next (&ei);
2293 continue;
2295 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2296 /* If the destination block is the exit block, and this
2297 instruction is a return, then keep the edge. */
2299 ei_next (&ei);
2300 continue;
2302 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2303 /* Keep the edges that correspond to exceptions thrown by
2304 this instruction and rematerialize the EDGE_ABNORMAL
2305 flag we just cleared above. */
2307 e->flags |= EDGE_ABNORMAL;
2308 ei_next (&ei);
2309 continue;
2312 /* We do not need this edge. */
2313 df_set_bb_dirty (bb);
2314 purged = true;
2315 remove_edge (e);
2318 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2319 return purged;
2321 if (dump_file)
2322 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2324 if (!optimize)
2325 return purged;
2327 /* Redistribute probabilities. */
2328 if (single_succ_p (bb))
2330 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2331 single_succ_edge (bb)->count = bb->count;
2333 else
2335 note = find_reg_note (insn, REG_BR_PROB, NULL);
2336 if (!note)
2337 return purged;
2339 b = BRANCH_EDGE (bb);
2340 f = FALLTHRU_EDGE (bb);
2341 b->probability = INTVAL (XEXP (note, 0));
2342 f->probability = REG_BR_PROB_BASE - b->probability;
2343 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2344 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2347 return purged;
2349 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2351 /* First, there should not be any EH or ABCALL edges resulting
2352 from non-local gotos and the like. If there were, we shouldn't
2353 have created the sibcall in the first place. Second, there
2354 should of course never have been a fallthru edge. */
2355 gcc_assert (single_succ_p (bb));
2356 gcc_assert (single_succ_edge (bb)->flags
2357 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2359 return 0;
2362 /* If we don't see a jump insn, we don't know exactly why the block would
2363 have been broken at this point. Look for a simple, non-fallthru edge,
2364 as these are only created by conditional branches. If we find such an
2365 edge we know that there used to be a jump here and can then safely
2366 remove all non-fallthru edges. */
2367 found = false;
2368 FOR_EACH_EDGE (e, ei, bb->succs)
2369 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2371 found = true;
2372 break;
2375 if (!found)
2376 return purged;
2378 /* Remove all but the fake and fallthru edges. The fake edge may be
2379 the only successor for this block in the case of noreturn
2380 calls. */
2381 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2383 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2385 df_set_bb_dirty (bb);
2386 remove_edge (e);
2387 purged = true;
2389 else
2390 ei_next (&ei);
2393 gcc_assert (single_succ_p (bb));
2395 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2396 single_succ_edge (bb)->count = bb->count;
2398 if (dump_file)
2399 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2400 bb->index);
2401 return purged;
2404 /* Search all basic blocks for potentially dead edges and purge them. Return
2405 true if some edge has been eliminated. */
2407 bool
2408 purge_all_dead_edges (void)
2410 int purged = false;
2411 basic_block bb;
2413 FOR_EACH_BB (bb)
2415 bool purged_here = purge_dead_edges (bb);
2417 purged |= purged_here;
2420 return purged;
2423 /* This is used by a few passes that emit some instructions after abnormal
2424 calls, moving the basic block's end, while they in fact want to emit
2425 them on the fallthru edge.  Look for abnormal call edges, search backward
2426 in the block for the call, and insert the instructions on the edge instead.
2428 Similarly, handle instructions throwing exceptions internally.
2430 Return true when instructions have been found and inserted on edges. */
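/* Editorial sketch (assumption): the typical caller emits its fix-up insns
   first and then runs, roughly,

       if (fixup_abnormal_edges ())
         commit_edge_insertions ();

   so that anything queued on the fallthru edges is actually materialized.  */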
2432 bool
2433 fixup_abnormal_edges (void)
2435 bool inserted = false;
2436 basic_block bb;
2438 FOR_EACH_BB (bb)
2440 edge e;
2441 edge_iterator ei;
2443 /* Look for cases we are interested in - calls or instructions causing
2444 exceptions. */
2445 FOR_EACH_EDGE (e, ei, bb->succs)
2446 if ((e->flags & EDGE_ABNORMAL_CALL)
2447 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2448 == (EDGE_ABNORMAL | EDGE_EH)))
2449 break;
2451 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2453 rtx insn;
2455 /* Get past the new insns generated.  Allow notes, as the insns
2456 may already have been deleted.  */
2457 insn = BB_END (bb);
2458 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2459 && !can_throw_internal (insn)
2460 && insn != BB_HEAD (bb))
2461 insn = PREV_INSN (insn);
2463 if (CALL_P (insn) || can_throw_internal (insn))
2465 rtx stop, next;
2467 e = find_fallthru_edge (bb->succs);
2469 stop = NEXT_INSN (BB_END (bb));
2470 BB_END (bb) = insn;
2472 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2474 next = NEXT_INSN (insn);
2475 if (INSN_P (insn))
2477 delete_insn (insn);
2479 /* Sometimes there's still the return value USE.
2480 If it's placed after a trapping call (i.e. that
2481 call is the last insn anyway), we have no fallthru
2482 edge. Simply delete this use and don't try to insert
2483 on the non-existent edge. */
2484 if (GET_CODE (PATTERN (insn)) != USE)
2486 /* We're not deleting it, we're moving it. */
2487 INSN_DELETED_P (insn) = 0;
2488 PREV_INSN (insn) = NULL_RTX;
2489 NEXT_INSN (insn) = NULL_RTX;
2491 insert_insn_on_edge (insn, e);
2492 inserted = true;
2495 else if (!BARRIER_P (insn))
2496 set_block_for_insn (insn, NULL);
2500 /* It may be that we don't find any trapping insn.  In this
2501 case we discovered quite late that the insn that had been
2502 marked as can_throw_internal in fact couldn't trap at all.
2503 So we should delete the EH edges out of the block.  */
2504 else
2505 purge_dead_edges (bb);
2509 return inserted;
2512 /* Same as split_block but update cfg_layout structures. */
2514 static basic_block
2515 cfg_layout_split_block (basic_block bb, void *insnp)
2517 rtx insn = (rtx) insnp;
2518 basic_block new_bb = rtl_split_block (bb, insn);
2520 new_bb->il.rtl->footer = bb->il.rtl->footer;
2521 bb->il.rtl->footer = NULL;
2523 return new_bb;
2526 /* Redirect edge E to DEST.  */
2527 static edge
2528 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2530 basic_block src = e->src;
2531 edge ret;
2533 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2534 return NULL;
2536 if (e->dest == dest)
2537 return e;
2539 if (e->src != ENTRY_BLOCK_PTR
2540 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2542 df_set_bb_dirty (src);
2543 return ret;
2546 if (e->src == ENTRY_BLOCK_PTR
2547 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2549 if (dump_file)
2550 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2551 e->src->index, dest->index);
2553 df_set_bb_dirty (e->src);
2554 redirect_edge_succ (e, dest);
2555 return e;
2558 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2559 edge when the basic blocks appear to be in sequence.  Avoid this
2560 transformation.  */
2562 if (e->flags & EDGE_FALLTHRU)
2564 /* Redirect any branch edges unified with the fallthru one. */
2565 if (JUMP_P (BB_END (src))
2566 && label_is_jump_target_p (BB_HEAD (e->dest),
2567 BB_END (src)))
2569 edge redirected;
2571 if (dump_file)
2572 fprintf (dump_file, "Fallthru edge unified with branch "
2573 "%i->%i redirected to %i\n",
2574 e->src->index, e->dest->index, dest->index);
2575 e->flags &= ~EDGE_FALLTHRU;
2576 redirected = redirect_branch_edge (e, dest);
2577 gcc_assert (redirected);
2578 redirected->flags |= EDGE_FALLTHRU;
2579 df_set_bb_dirty (redirected->src);
2580 return redirected;
2582 /* If we are redirecting the fallthru edge to the branch edge
2583 of a conditional jump, remove the jump insn.  */
2584 if (EDGE_COUNT (src->succs) == 2)
2586 /* Find the edge that is different from E. */
2587 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2589 if (s->dest == dest
2590 && any_condjump_p (BB_END (src))
2591 && onlyjump_p (BB_END (src)))
2592 delete_insn (BB_END (src));
2594 if (dump_file)
2595 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
2596 e->src->index, e->dest->index, dest->index);
2597 ret = redirect_edge_succ_nodup (e, dest);
2599 else
2600 ret = redirect_branch_edge (e, dest);
2602 /* We don't want simplejumps in the insn stream during cfglayout. */
2603 gcc_assert (!simplejump_p (BB_END (src)));
2605 df_set_bb_dirty (src);
2606 return ret;
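/* Editorial sketch (assumption): callers reach the routine above through the
   generic redirect_edge_and_branch hook, e.g.

       edge e2 = redirect_edge_and_branch (e, new_dest);
       if (!e2)
         ...   -- redirection refused (abnormal call or EH edge)

   which in cfglayout mode dispatches to the function above.  */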
2609 /* Simple wrapper as we can always redirect fallthru edges.  */
2610 static basic_block
2611 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2613 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2615 gcc_assert (redirected);
2616 return NULL;
2619 /* Same as delete_basic_block but update cfg_layout structures. */
2621 static void
2622 cfg_layout_delete_block (basic_block bb)
2624 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remains;
2626 if (bb->il.rtl->header)
2628 next = BB_HEAD (bb);
2629 if (prev)
2630 NEXT_INSN (prev) = bb->il.rtl->header;
2631 else
2632 set_first_insn (bb->il.rtl->header);
2633 PREV_INSN (bb->il.rtl->header) = prev;
2634 insn = bb->il.rtl->header;
2635 while (NEXT_INSN (insn))
2636 insn = NEXT_INSN (insn);
2637 NEXT_INSN (insn) = next;
2638 PREV_INSN (next) = insn;
2640 next = NEXT_INSN (BB_END (bb));
2641 if (bb->il.rtl->footer)
2643 insn = bb->il.rtl->footer;
2644 while (insn)
2646 if (BARRIER_P (insn))
2648 if (PREV_INSN (insn))
2649 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2650 else
2651 bb->il.rtl->footer = NEXT_INSN (insn);
2652 if (NEXT_INSN (insn))
2653 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2655 if (LABEL_P (insn))
2656 break;
2657 insn = NEXT_INSN (insn);
2659 if (bb->il.rtl->footer)
2661 insn = BB_END (bb);
2662 NEXT_INSN (insn) = bb->il.rtl->footer;
2663 PREV_INSN (bb->il.rtl->footer) = insn;
2664 while (NEXT_INSN (insn))
2665 insn = NEXT_INSN (insn);
2666 NEXT_INSN (insn) = next;
2667 if (next)
2668 PREV_INSN (next) = insn;
2669 else
2670 set_last_insn (insn);
2673 if (bb->next_bb != EXIT_BLOCK_PTR)
2674 to = &bb->next_bb->il.rtl->header;
2675 else
2676 to = &cfg_layout_function_footer;
2678 rtl_delete_block (bb);
2680 if (prev)
2681 prev = NEXT_INSN (prev);
2682 else
2683 prev = get_insns ();
2684 if (next)
2685 next = PREV_INSN (next);
2686 else
2687 next = get_last_insn ();
2689 if (next && NEXT_INSN (next) != prev)
2691 remains = unlink_insn_chain (prev, next);
2692 insn = remains;
2693 while (NEXT_INSN (insn))
2694 insn = NEXT_INSN (insn);
2695 NEXT_INSN (insn) = *to;
2696 if (*to)
2697 PREV_INSN (*to) = insn;
2698 *to = remains;
2702 /* Return true when blocks A and B can be safely merged. */
2704 static bool
2705 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2707 /* If we are partitioning hot/cold basic blocks, we don't want to
2708 mess up unconditional or indirect jumps that cross between hot
2709 and cold sections.
2711 Basic block partitioning may result in some jumps that appear to
2712 be optimizable (or blocks that appear to be mergeable), but which really
2713 must be left untouched (they are required to make it safely across
2714 partition boundaries). See the comments at the top of
2715 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2717 if (BB_PARTITION (a) != BB_PARTITION (b))
2718 return false;
2720 /* There must be exactly one edge in between the blocks. */
2721 return (single_succ_p (a)
2722 && single_succ (a) == b
2723 && single_pred_p (b)
2724 && a != b
2725 /* Must be simple edge. */
2726 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2727 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2728 /* If the jump insn has side effects, we can't kill the edge.
2729 When not optimizing, try_redirect_by_replacing_jump will
2730 not allow us to redirect an edge by replacing a table jump. */
2731 && (!JUMP_P (BB_END (a))
2732 || ((!optimize || reload_completed)
2733 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2736 /* Merge block A and B. The blocks must be mergeable. */
2738 static void
2739 cfg_layout_merge_blocks (basic_block a, basic_block b)
2741 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
2743 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
2745 if (dump_file)
2746 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
2747 a->index);
2749 /* If there was a CODE_LABEL beginning B, delete it. */
2750 if (LABEL_P (BB_HEAD (b)))
2752 delete_insn (BB_HEAD (b));
2755 /* We should have a fallthru edge in A, or we can do a dummy redirection to
2756 get it cleaned up.  */
2757 if (JUMP_P (BB_END (a)))
2758 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2759 gcc_assert (!JUMP_P (BB_END (a)));
2761 /* When not optimizing and the edge is the only place in RTL which holds
2762 some unique locus, emit a nop with that locus in between. */
2763 if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
2765 rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
2766 int goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2768 while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
2769 insn = PREV_INSN (insn);
2770 if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
2771 goto_locus = 0;
2772 else
2774 insn = BB_HEAD (b);
2775 end = NEXT_INSN (BB_END (b));
2776 while (insn != end && !INSN_P (insn))
2777 insn = NEXT_INSN (insn);
2778 if (insn != end && INSN_LOCATOR (insn) != 0
2779 && locator_eq (INSN_LOCATOR (insn), goto_locus))
2780 goto_locus = 0;
2782 if (goto_locus)
2784 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
2785 INSN_LOCATOR (BB_END (a)) = goto_locus;
2789 /* Possible line number notes should appear in between. */
2790 if (b->il.rtl->header)
2792 rtx first = BB_END (a), last;
2794 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2795 delete_insn_chain (NEXT_INSN (first), last, false);
2796 b->il.rtl->header = NULL;
2799 /* If the basic blocks are not adjacent, move them around.  */
2800 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2802 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2804 emit_insn_after_noloc (first, BB_END (a), a);
2805 /* Skip possible DELETED_LABEL insn. */
2806 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2807 first = NEXT_INSN (first);
2808 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2809 BB_HEAD (b) = NULL;
2811 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2812 We need to call it explicitly.  */
2813 update_bb_for_insn_chain (NEXT_INSN (first),
2814 BB_END (b),
2815 a);
2817 delete_insn (first);
2819 /* Otherwise just re-associate the instructions. */
2820 else
2822 rtx insn;
2824 update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);
2826 insn = BB_HEAD (b);
2827 /* Skip possible DELETED_LABEL insn. */
2828 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2829 insn = NEXT_INSN (insn);
2830 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2831 BB_HEAD (b) = NULL;
2832 BB_END (a) = BB_END (b);
2833 delete_insn (insn);
2836 df_bb_delete (b->index);
2838 /* Possible tablejumps and barriers should appear after the block. */
2839 if (b->il.rtl->footer)
2841 if (!a->il.rtl->footer)
2842 a->il.rtl->footer = b->il.rtl->footer;
2843 else
2845 rtx last = a->il.rtl->footer;
2847 while (NEXT_INSN (last))
2848 last = NEXT_INSN (last);
2849 NEXT_INSN (last) = b->il.rtl->footer;
2850 PREV_INSN (b->il.rtl->footer) = last;
2852 b->il.rtl->footer = NULL;
2855 /* If B was a forwarder block, propagate the locus on the edge. */
2856 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
2857 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2859 if (dump_file)
2860 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
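/* Editorial sketch (assumption): cleanup code reaches the two routines above
   through the generic hooks, roughly

       if (can_merge_blocks_p (a, b))
         merge_blocks (a, b);

   which the cfg_layout_rtl_cfg_hooks table at the end of this file routes to
   cfg_layout_can_merge_blocks_p and cfg_layout_merge_blocks.  */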
2863 /* Split edge E. */
2865 static basic_block
2866 cfg_layout_split_edge (edge e)
2868 basic_block new_bb =
2869 create_basic_block (e->src != ENTRY_BLOCK_PTR
2870 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2871 NULL_RTX, e->src);
2873 if (e->dest == EXIT_BLOCK_PTR)
2874 BB_COPY_PARTITION (new_bb, e->src);
2875 else
2876 BB_COPY_PARTITION (new_bb, e->dest);
2877 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2878 redirect_edge_and_branch_force (e, new_bb);
2880 return new_bb;
2883 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2885 static void
2886 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2890 /* Return 1 if BB ends with a call, possibly followed by some
2891 instructions that must stay with the call, 0 otherwise. */
2893 static bool
2894 rtl_block_ends_with_call_p (basic_block bb)
2896 rtx insn = BB_END (bb);
2898 while (!CALL_P (insn)
2899 && insn != BB_HEAD (bb)
2900 && (keep_with_call_p (insn)
2901 || NOTE_P (insn)
2902 || DEBUG_INSN_P (insn)))
2903 insn = PREV_INSN (insn);
2904 return (CALL_P (insn));
2907 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2909 static bool
2910 rtl_block_ends_with_condjump_p (const_basic_block bb)
2912 return any_condjump_p (BB_END (bb));
2915 /* Return true if we need to add fake edge to exit.
2916 Helper function for rtl_flow_call_edges_add. */
2918 static bool
2919 need_fake_edge_p (const_rtx insn)
2921 if (!INSN_P (insn))
2922 return false;
2924 if ((CALL_P (insn)
2925 && !SIBLING_CALL_P (insn)
2926 && !find_reg_note (insn, REG_NORETURN, NULL)
2927 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2928 return true;
2930 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2931 && MEM_VOLATILE_P (PATTERN (insn)))
2932 || (GET_CODE (PATTERN (insn)) == PARALLEL
2933 && asm_noperands (insn) != -1
2934 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2935 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
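/* Editorial note (assumption): the predicate above answers "yes" for an
   ordinary CALL_INSN to an unknown function, since the callee might never
   return, but "no" for sibling calls, calls carrying a REG_NORETURN note and
   const/pure calls.  Volatile asm statements are treated like possibly
   non-returning calls.  */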
2938 /* Add fake edges to the function exit for any non-constant, non-noreturn
2939 calls and volatile inline assembly in the bitmap of blocks specified by
2940 BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of blocks
2941 that were split.
2943 The goal is to expose cases in which entering a basic block does not imply
2944 that all subsequent instructions must be executed. */
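/* Editorial sketch (assumption): this routine is normally reached through the
   flow_call_edges_add hook, e.g. by profiling code that needs every block to
   have a well-defined exit:

       int n_split = flow_call_edges_add (NULL);   -- NULL: whole CFG

   Passing an sbitmap instead restricts the work to the blocks set in it.  */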
2946 static int
2947 rtl_flow_call_edges_add (sbitmap blocks)
2949 int i;
2950 int blocks_split = 0;
2951 int last_bb = last_basic_block;
2952 bool check_last_block = false;
2954 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2955 return 0;
2957 if (! blocks)
2958 check_last_block = true;
2959 else
2960 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2962 /* In the last basic block, before epilogue generation, there will be
2963 a fallthru edge to EXIT. Special care is required if the last insn
2964 of the last basic block is a call because make_edge folds duplicate
2965 edges, which would result in the fallthru edge also being marked
2966 fake, which would result in the fallthru edge being removed by
2967 remove_fake_edges, which would result in an invalid CFG.
2969 Moreover, we can't elide the outgoing fake edge, since the block
2970 profiler needs to take this into account in order to solve the minimal
2971 spanning tree in the case that the call doesn't return.
2973 Handle this by adding a dummy instruction in a new last basic block. */
2974 if (check_last_block)
2976 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2977 rtx insn = BB_END (bb);
2979 /* Back up past insns that must be kept in the same block as a call. */
2980 while (insn != BB_HEAD (bb)
2981 && keep_with_call_p (insn))
2982 insn = PREV_INSN (insn);
2984 if (need_fake_edge_p (insn))
2986 edge e;
2988 e = find_edge (bb, EXIT_BLOCK_PTR);
2989 if (e)
2991 insert_insn_on_edge (gen_use (const0_rtx), e);
2992 commit_edge_insertions ();
2997 /* Now add fake edges to the function exit for any non-constant
2998 calls, since there is no way to determine whether they will
2999 return or not.  */
3001 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
3003 basic_block bb = BASIC_BLOCK (i);
3004 rtx insn;
3005 rtx prev_insn;
3007 if (!bb)
3008 continue;
3010 if (blocks && !TEST_BIT (blocks, i))
3011 continue;
3013 for (insn = BB_END (bb); ; insn = prev_insn)
3015 prev_insn = PREV_INSN (insn);
3016 if (need_fake_edge_p (insn))
3018 edge e;
3019 rtx split_at_insn = insn;
3021 /* Don't split the block between a call and an insn that should
3022 remain in the same block as the call. */
3023 if (CALL_P (insn))
3024 while (split_at_insn != BB_END (bb)
3025 && keep_with_call_p (NEXT_INSN (split_at_insn)))
3026 split_at_insn = NEXT_INSN (split_at_insn);
3028 /* The handling above of the final block before the epilogue
3029 should be enough to verify that there is no edge to the exit
3030 block in the CFG already.  Calling make_edge in such a case would
3031 cause us to mark that edge as fake and remove it later. */
3033 #ifdef ENABLE_CHECKING
3034 if (split_at_insn == BB_END (bb))
3036 e = find_edge (bb, EXIT_BLOCK_PTR);
3037 gcc_assert (e == NULL);
3039 #endif
3041 /* Note that the following may create a new basic block
3042 and renumber the existing basic blocks. */
3043 if (split_at_insn != BB_END (bb))
3045 e = split_block (bb, split_at_insn);
3046 if (e)
3047 blocks_split++;
3050 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
3053 if (insn == BB_HEAD (bb))
3054 break;
3058 if (blocks_split)
3059 verify_flow_info ();
3061 return blocks_split;
3064 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
3065 the conditional branch target and SECOND_HEAD would be the fall-thru
3066 target; the latter needs no handling here, since the loop versioning
3067 code takes care of it.  SECOND_HEAD exists only because the tree-level
3068 variant of this hook needs it, and a hook must have a single signature.  */
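/* Editorial sketch (assumption): COMP_RTX is a comparison rtx built by the
   loop versioning code, conceptually something like

       rtx cond = gen_rtx_fmt_ee (LE, VOIDmode, niter_reg, bound_rtx);
       lv_add_condition_to_bb (then_bb, else_bb, cond_bb, cond);

   where niter_reg, bound_rtx and the block names are hypothetical; the
   routine below then emits the compare-and-jump sequence at the end of
   COND_BB.  */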
3069 static void
3070 rtl_lv_add_condition_to_bb (basic_block first_head ,
3071 basic_block second_head ATTRIBUTE_UNUSED,
3072 basic_block cond_bb, void *comp_rtx)
3074 rtx label, seq, jump;
3075 rtx op0 = XEXP ((rtx)comp_rtx, 0);
3076 rtx op1 = XEXP ((rtx)comp_rtx, 1);
3077 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3078 enum machine_mode mode;
3081 label = block_label (first_head);
3082 mode = GET_MODE (op0);
3083 if (mode == VOIDmode)
3084 mode = GET_MODE (op1);
3086 start_sequence ();
3087 op0 = force_operand (op0, NULL_RTX);
3088 op1 = force_operand (op1, NULL_RTX);
3089 do_compare_rtx_and_jump (op0, op1, comp, 0,
3090 mode, NULL_RTX, NULL_RTX, label, -1);
3091 jump = get_last_insn ();
3092 JUMP_LABEL (jump) = label;
3093 LABEL_NUSES (label)++;
3094 seq = get_insns ();
3095 end_sequence ();
3097 /* Add the new condition at the end of the conditional block.  */
3098 emit_insn_after (seq, BB_END (cond_bb));
3102 /* Given a block B with a conditional branch at its end, store the
3103 branch edge in *BRANCH_EDGE and the fall-thru edge in *FALLTHRU_EDGE
3104 respectively.  */
3105 static void
3106 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3107 edge *fallthru_edge)
3109 edge e = EDGE_SUCC (b, 0);
3111 if (e->flags & EDGE_FALLTHRU)
3113 *fallthru_edge = e;
3114 *branch_edge = EDGE_SUCC (b, 1);
3116 else
3118 *branch_edge = e;
3119 *fallthru_edge = EDGE_SUCC (b, 1);
3123 void
3124 init_rtl_bb_info (basic_block bb)
3126 gcc_assert (!bb->il.rtl);
3127 bb->il.rtl = ggc_alloc_cleared_rtl_bb_info ();
3130 /* Returns true if it is possible to remove edge E by redirecting
3131 it to the destination of the other edge from E->src. */
3133 static bool
3134 rtl_can_remove_branch_p (const_edge e)
3136 const_basic_block src = e->src;
3137 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3138 const_rtx insn = BB_END (src), set;
3140 /* The conditions are taken from try_redirect_by_replacing_jump. */
3141 if (target == EXIT_BLOCK_PTR)
3142 return false;
3144 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3145 return false;
3147 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3148 || BB_PARTITION (src) != BB_PARTITION (target))
3149 return false;
3151 if (!onlyjump_p (insn)
3152 || tablejump_p (insn, NULL, NULL))
3153 return false;
3155 set = single_set (insn);
3156 if (!set || side_effects_p (set))
3157 return false;
3159 return true;
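/* Editorial note (assumption): this predicate backs the generic
   can_remove_branch_p / remove_branch pair in cfghooks.c; a caller first asks
   whether E can be removed by redirecting it to the other successor of
   E->src and only then performs the removal.  */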
3162 /* Implementation of CFG manipulation for linearized RTL. */
3163 struct cfg_hooks rtl_cfg_hooks = {
3164 "rtl",
3165 rtl_verify_flow_info,
3166 rtl_dump_bb,
3167 rtl_create_basic_block,
3168 rtl_redirect_edge_and_branch,
3169 rtl_redirect_edge_and_branch_force,
3170 rtl_can_remove_branch_p,
3171 rtl_delete_block,
3172 rtl_split_block,
3173 rtl_move_block_after,
3174 rtl_can_merge_blocks, /* can_merge_blocks_p */
3175 rtl_merge_blocks,
3176 rtl_predict_edge,
3177 rtl_predicted_by_p,
3178 NULL, /* can_duplicate_block_p */
3179 NULL, /* duplicate_block */
3180 rtl_split_edge,
3181 rtl_make_forwarder_block,
3182 rtl_tidy_fallthru_edge,
3183 rtl_force_nonfallthru,
3184 rtl_block_ends_with_call_p,
3185 rtl_block_ends_with_condjump_p,
3186 rtl_flow_call_edges_add,
3187 NULL, /* execute_on_growing_pred */
3188 NULL, /* execute_on_shrinking_pred */
3189 NULL, /* duplicate loop for trees */
3190 NULL, /* lv_add_condition_to_bb */
3191 NULL, /* lv_adjust_loop_header_phi*/
3192 NULL, /* extract_cond_bb_edges */
3193 NULL /* flush_pending_stmts */
3196 /* Implementation of CFG manipulation for cfg layout RTL, where
3197 basic blocks connected via fallthru edges do not have to be adjacent.
3198 This representation will hopefully become the default one in a future
3199 version of the compiler. */
3201 /* We do not want to declare these functions in a header file, since they
3202 should only be used through the cfghooks interface, and we do not want to
3203 move them here since it would require also moving quite a lot of related
3204 code. They are in cfglayout.c. */
3205 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3206 extern basic_block cfg_layout_duplicate_bb (basic_block);
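/* Editorial sketch (assumption): a pass opts into this representation with
   something like

       cfg_layout_initialize (0);   -- installs the hooks table below
       ...  transformations that may leave fallthru blocks non-adjacent  ...
       cfg_layout_finalize ();      -- relinks the insn chain

   both helpers live in cfglayout.c next to the two routines declared above.  */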
3208 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3209 "cfglayout mode",
3210 rtl_verify_flow_info_1,
3211 rtl_dump_bb,
3212 cfg_layout_create_basic_block,
3213 cfg_layout_redirect_edge_and_branch,
3214 cfg_layout_redirect_edge_and_branch_force,
3215 rtl_can_remove_branch_p,
3216 cfg_layout_delete_block,
3217 cfg_layout_split_block,
3218 rtl_move_block_after,
3219 cfg_layout_can_merge_blocks_p,
3220 cfg_layout_merge_blocks,
3221 rtl_predict_edge,
3222 rtl_predicted_by_p,
3223 cfg_layout_can_duplicate_bb_p,
3224 cfg_layout_duplicate_bb,
3225 cfg_layout_split_edge,
3226 rtl_make_forwarder_block,
3227 NULL, /* tidy_fallthru_edge */
3228 rtl_force_nonfallthru,
3229 rtl_block_ends_with_call_p,
3230 rtl_block_ends_with_condjump_p,
3231 rtl_flow_call_edges_add,
3232 NULL, /* execute_on_growing_pred */
3233 NULL, /* execute_on_shrinking_pred */
3234 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3235 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3236 NULL, /* lv_adjust_loop_header_phi*/
3237 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3238 NULL /* flush_pending_stmts */