gcc/cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low-level functions to manipulate and analyze the CFG
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
33 - CFG fixing after coarse manipulation
34 fixup_abnormal_edges
36 Functions not intended for generic use:
37 - Infrastructure to determine quickly basic block for insn
38 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
39 - Edge redirection with updating and optimizing of insn chain
40 block_label, tidy_fallthru_edge, force_nonfallthru */
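/* For example, a pass that deletes the jump ending a basic block will
   typically call delete_insn and then purge_dead_edges on that block;
   delete_insn_and_edges below combines the two steps.  */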
42 #include "config.h"
43 #include "system.h"
44 #include "coretypes.h"
45 #include "tm.h"
46 #include "tree.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "output.h"
52 #include "function.h"
53 #include "except.h"
54 #include "rtl-error.h"
55 #include "tm_p.h"
56 #include "obstack.h"
57 #include "insn-attr.h"
58 #include "insn-config.h"
59 #include "cfglayout.h"
60 #include "expr.h"
61 #include "target.h"
62 #include "common/common-target.h"
63 #include "cfgloop.h"
64 #include "ggc.h"
65 #include "tree-pass.h"
66 #include "df.h"
68 static int can_delete_note_p (const_rtx);
69 static int can_delete_label_p (const_rtx);
70 static basic_block rtl_split_edge (edge);
71 static bool rtl_move_block_after (basic_block, basic_block);
72 static int rtl_verify_flow_info (void);
73 static basic_block cfg_layout_split_block (basic_block, void *);
74 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
75 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
76 static void cfg_layout_delete_block (basic_block);
77 static void rtl_delete_block (basic_block);
78 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
79 static edge rtl_redirect_edge_and_branch (edge, basic_block);
80 static basic_block rtl_split_block (basic_block, void *);
81 static void rtl_dump_bb (basic_block, FILE *, int, int);
82 static int rtl_verify_flow_info_1 (void);
83 static void rtl_make_forwarder_block (edge);
85 /* Return true if NOTE is not one of the ones that must be kept paired,
86 so that we may simply delete it. */
88 static int
89 can_delete_note_p (const_rtx note)
91 switch (NOTE_KIND (note))
93 case NOTE_INSN_DELETED:
94 case NOTE_INSN_BASIC_BLOCK:
95 case NOTE_INSN_EPILOGUE_BEG:
96 return true;
98 default:
99 return false;
103 /* True if a given label can be deleted. */
105 static int
106 can_delete_label_p (const_rtx label)
108 return (!LABEL_PRESERVE_P (label)
109 /* User declared labels must be preserved. */
110 && LABEL_NAME (label) == 0
111 && !in_expr_list_p (forced_labels, label));
114 /* Delete INSN by patching it out. Return the next insn. */
116 rtx
117 delete_insn (rtx insn)
119 rtx next = NEXT_INSN (insn);
120 rtx note;
121 bool really_delete = true;
123 if (LABEL_P (insn))
125 /* Some labels can't be directly removed from the INSN chain, as they
126 might be referenced via variables, the constant pool, etc.
127 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
128 if (! can_delete_label_p (insn))
130 const char *name = LABEL_NAME (insn);
132 really_delete = false;
133 PUT_CODE (insn, NOTE);
134 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
135 NOTE_DELETED_LABEL_NAME (insn) = name;
138 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
141 if (really_delete)
143 /* If this insn has already been deleted, something is very wrong. */
144 gcc_assert (!INSN_DELETED_P (insn));
145 remove_insn (insn);
146 INSN_DELETED_P (insn) = 1;
149 /* If deleting a jump, decrement the use count of the label. Deleting
150 the label itself should happen in the normal course of block merging. */
151 if (JUMP_P (insn))
153 if (JUMP_LABEL (insn)
154 && LABEL_P (JUMP_LABEL (insn)))
155 LABEL_NUSES (JUMP_LABEL (insn))--;
157 /* If there are more targets, remove them too. */
158 while ((note
159 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
160 && LABEL_P (XEXP (note, 0)))
162 LABEL_NUSES (XEXP (note, 0))--;
163 remove_note (insn, note);
167 /* Also if deleting any insn that references a label as an operand. */
168 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
169 && LABEL_P (XEXP (note, 0)))
171 LABEL_NUSES (XEXP (note, 0))--;
172 remove_note (insn, note);
175 if (JUMP_TABLE_DATA_P (insn))
177 rtx pat = PATTERN (insn);
178 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
179 int len = XVECLEN (pat, diff_vec_p);
180 int i;
182 for (i = 0; i < len; i++)
184 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
186 /* When deleting code in bulk (e.g. removing many unreachable
187 blocks) we can delete a label that's a target of the vector
188 before deleting the vector itself. */
189 if (!NOTE_P (label))
190 LABEL_NUSES (label)--;
194 return next;
197 /* Like delete_insn but also purge dead edges from BB. */
199 rtx
200 delete_insn_and_edges (rtx insn)
202 rtx x;
203 bool purge = false;
205 if (INSN_P (insn)
206 && BLOCK_FOR_INSN (insn)
207 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
208 purge = true;
209 x = delete_insn (insn);
210 if (purge)
211 purge_dead_edges (BLOCK_FOR_INSN (insn));
212 return x;
215 /* Unlink a chain of insns between START and FINISH, leaving notes
216 that must be paired.  If CLEAR_BB is true, we set the bb field to NULL
217 for insns that cannot be removed. */
219 void
220 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
222 rtx next;
224 /* Unchain the insns one by one. It would be quicker to delete all of these
225 with a single unchaining, rather than one at a time, but we need to keep
226 the NOTEs. */
227 while (1)
229 next = NEXT_INSN (start);
230 if (NOTE_P (start) && !can_delete_note_p (start))
232 else
233 next = delete_insn (start);
235 if (clear_bb && !INSN_DELETED_P (start))
236 set_block_for_insn (start, NULL);
238 if (start == finish)
239 break;
240 start = next;
244 /* Create a new basic block consisting of the instructions between HEAD and END
245 inclusive.  This function is designed to allow fast BB construction - it reuses
246 the note and basic block struct in BB_NOTE, if any, and does not grow the
247 BASIC_BLOCK chain; it should be used directly only by CFG construction code.
248 END can be NULL to create a new empty basic block before HEAD.  Both END
249 and HEAD can be NULL to create a basic block at the end of the INSN chain.
250 AFTER is the basic block the new block should be placed after. */
252 basic_block
253 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
255 basic_block bb;
257 if (bb_note
258 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 && bb->aux == NULL)
261 /* If we found an existing note, thread it back onto the chain. */
263 rtx after;
265 if (LABEL_P (head))
266 after = head;
267 else
269 after = PREV_INSN (head);
270 head = bb_note;
273 if (after != bb_note && NEXT_INSN (after) != bb_note)
274 reorder_insns_nobb (bb_note, bb_note, after);
276 else
278 /* Otherwise we must create a note and a basic block structure. */
280 bb = alloc_block ();
282 init_rtl_bb_info (bb);
283 if (!head && !end)
284 head = end = bb_note
285 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
286 else if (LABEL_P (head) && end)
288 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
289 if (head == end)
290 end = bb_note;
292 else
294 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
295 head = bb_note;
296 if (!end)
297 end = head;
300 NOTE_BASIC_BLOCK (bb_note) = bb;
303 /* Always include the bb note in the block. */
304 if (NEXT_INSN (end) == bb_note)
305 end = bb_note;
307 BB_HEAD (bb) = head;
308 BB_END (bb) = end;
309 bb->index = last_basic_block++;
310 bb->flags = BB_NEW | BB_RTL;
311 link_block (bb, after);
312 SET_BASIC_BLOCK (bb->index, bb);
313 df_bb_refs_record (bb->index, false);
314 update_bb_for_insn (bb);
315 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
317 /* Tag the block so that we know it has been used when considering
318 other basic block notes. */
319 bb->aux = bb;
321 return bb;
324 /* Create a new basic block consisting of the instructions between HEAD and END
325 and place it in the BB chain after block AFTER.  END can be NULL to
326 create a new empty basic block before HEAD.  Both END and HEAD can be NULL to
327 create a basic block at the end of the INSN chain. */
329 static basic_block
330 rtl_create_basic_block (void *headp, void *endp, basic_block after)
332 rtx head = (rtx) headp, end = (rtx) endp;
333 basic_block bb;
335 /* Grow the basic block array if needed. */
336 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
338 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
339 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
342 n_basic_blocks++;
344 bb = create_basic_block_structure (head, end, NULL, after);
345 bb->aux = NULL;
346 return bb;
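/* cfglayout-mode hook: create a basic block from HEAD and END and place it
   after AFTER, delegating to rtl_create_basic_block above.  */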
349 static basic_block
350 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
352 basic_block newbb = rtl_create_basic_block (head, end, after);
354 return newbb;
357 /* Delete the insns in a (non-live) block. We physically delete every
358 non-deleted-note insn, and update the flow graph appropriately. */
362 /* ??? Preserving all such notes strikes me as wrong. It would be nice
363 to post-process the stream to remove empty blocks, loops, ranges, etc. */
365 static void
366 rtl_delete_block (basic_block b)
368 rtx insn, end;
370 /* If the head of this block is a CODE_LABEL, then it might be the
371 label for an exception handler which can't be reached. We need
372 to remove the label from the exception_handler_label list. */
373 insn = BB_HEAD (b);
375 end = get_last_bb_insn (b);
377 /* Selectively delete the entire chain. */
378 BB_HEAD (b) = NULL;
379 delete_insn_chain (insn, end, true);
382 if (dump_file)
383 fprintf (dump_file, "deleting block %d\n", b->index);
384 df_bb_delete (b->index);
387 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
389 void
390 compute_bb_for_insn (void)
392 basic_block bb;
394 FOR_EACH_BB (bb)
396 rtx end = BB_END (bb);
397 rtx insn;
399 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
401 BLOCK_FOR_INSN (insn) = bb;
402 if (insn == end)
403 break;
408 /* Release the basic_block_for_insn array. */
410 unsigned int
411 free_bb_for_insn (void)
413 rtx insn;
414 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
415 if (!BARRIER_P (insn))
416 BLOCK_FOR_INSN (insn) = NULL;
417 return 0;
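/* Execute function of the pass below: drop the BLOCK_FOR_INSN mapping once
   the CFG is no longer going to be maintained.  With DELAY_SLOTS, first give
   resource.c the df note information it needs (see the comment inside).  */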
420 static unsigned int
421 rest_of_pass_free_cfg (void)
423 #ifdef DELAY_SLOTS
424 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
425 valid at that point so it would be too late to call df_analyze. */
426 if (optimize > 0 && flag_delayed_branch)
428 df_note_add_problem ();
429 df_analyze ();
431 #endif
433 free_bb_for_insn ();
434 return 0;
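/* Pass descriptor for the CFG teardown pass above; note that it lists
   PROP_cfg under properties_destroyed.  */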
437 struct rtl_opt_pass pass_free_cfg =
440 RTL_PASS,
441 "*free_cfg", /* name */
442 NULL, /* gate */
443 rest_of_pass_free_cfg, /* execute */
444 NULL, /* sub */
445 NULL, /* next */
446 0, /* static_pass_number */
447 TV_NONE, /* tv_id */
448 0, /* properties_required */
449 0, /* properties_provided */
450 PROP_cfg, /* properties_destroyed */
451 0, /* todo_flags_start */
452 0, /* todo_flags_finish */
456 /* Return the RTX after which to emit code at the entry of the function. */
457 rtx
458 entry_of_function (void)
460 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
461 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
464 /* Emit INSN at the entry point of the function, ensuring that it is only
465 executed once per function. */
466 void
467 emit_insn_at_entry (rtx insn)
469 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
470 edge e = ei_safe_edge (ei);
471 gcc_assert (e->flags & EDGE_FALLTHRU);
473 insert_insn_on_edge (insn, e);
474 commit_edge_insertions ();
477 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
478 (or BARRIER if found) and notify df of the bb change.
479 The insn chain range is inclusive
480 (i.e. both BEGIN and END will be updated). */
482 static void
483 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
485 rtx insn;
487 end = NEXT_INSN (end);
488 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
489 if (!BARRIER_P (insn))
490 df_insn_change_bb (insn, bb);
493 /* Update BLOCK_FOR_INSN of insns in BB to BB,
494 and notify df of the change. */
496 void
497 update_bb_for_insn (basic_block bb)
499 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
503 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
504 note associated with the BLOCK. */
506 static rtx
507 first_insn_after_basic_block_note (basic_block block)
509 rtx insn;
511 /* Get the first instruction in the block. */
512 insn = BB_HEAD (block);
514 if (insn == NULL_RTX)
515 return NULL_RTX;
516 if (LABEL_P (insn))
517 insn = NEXT_INSN (insn);
518 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
520 return NEXT_INSN (insn);
523 /* Creates a new basic block just after basic block B by splitting
524 everything after the specified instruction I. */
526 static basic_block
527 rtl_split_block (basic_block bb, void *insnp)
529 basic_block new_bb;
530 rtx insn = (rtx) insnp;
531 edge e;
532 edge_iterator ei;
534 if (!insn)
536 insn = first_insn_after_basic_block_note (bb);
538 if (insn)
540 rtx next = insn;
542 insn = PREV_INSN (insn);
544 /* If the block contains only debug insns, insn would have
545 been NULL in a non-debug compilation, and then we'd end
546 up emitting a DELETED note. For -fcompare-debug
547 stability, emit the note too. */
548 if (insn != BB_END (bb)
549 && DEBUG_INSN_P (next)
550 && DEBUG_INSN_P (BB_END (bb)))
552 while (next != BB_END (bb) && DEBUG_INSN_P (next))
553 next = NEXT_INSN (next);
555 if (next == BB_END (bb))
556 emit_note_after (NOTE_INSN_DELETED, next);
559 else
560 insn = get_last_insn ();
563 /* We probably should check type of the insn so that we do not create
564 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
565 bother. */
566 if (insn == BB_END (bb))
567 emit_note_after (NOTE_INSN_DELETED, insn);
569 /* Create the new basic block. */
570 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
571 BB_COPY_PARTITION (new_bb, bb);
572 BB_END (bb) = insn;
574 /* Redirect the outgoing edges. */
575 new_bb->succs = bb->succs;
576 bb->succs = NULL;
577 FOR_EACH_EDGE (e, ei, new_bb->succs)
578 e->src = new_bb;
580 /* The new block starts off being dirty. */
581 df_set_bb_dirty (bb);
582 return new_bb;
585 /* Blocks A and B are to be merged into a single block A. The insns
586 are already contiguous. */
588 static void
589 rtl_merge_blocks (basic_block a, basic_block b)
591 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
592 rtx del_first = NULL_RTX, del_last = NULL_RTX;
593 rtx b_debug_start = b_end, b_debug_end = b_end;
594 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
595 int b_empty = 0;
597 if (dump_file)
598 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
599 a->index);
601 while (DEBUG_INSN_P (b_end))
602 b_end = PREV_INSN (b_debug_start = b_end);
604 /* If there was a CODE_LABEL beginning B, delete it. */
605 if (LABEL_P (b_head))
607 /* Detect basic blocks with nothing but a label. This can happen
608 in particular at the end of a function. */
609 if (b_head == b_end)
610 b_empty = 1;
612 del_first = del_last = b_head;
613 b_head = NEXT_INSN (b_head);
616 /* Delete the basic block note and handle blocks containing just that
617 note. */
618 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
620 if (b_head == b_end)
621 b_empty = 1;
622 if (! del_last)
623 del_first = b_head;
625 del_last = b_head;
626 b_head = NEXT_INSN (b_head);
629 /* If there was a jump out of A, delete it. */
630 if (JUMP_P (a_end))
632 rtx prev;
634 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
635 if (!NOTE_P (prev)
636 || NOTE_INSN_BASIC_BLOCK_P (prev)
637 || prev == BB_HEAD (a))
638 break;
640 del_first = a_end;
642 #ifdef HAVE_cc0
643 /* If this was a conditional jump, we need to also delete
644 the insn that set cc0. */
645 if (only_sets_cc0_p (prev))
647 rtx tmp = prev;
649 prev = prev_nonnote_insn (prev);
650 if (!prev)
651 prev = BB_HEAD (a);
652 del_first = tmp;
654 #endif
656 a_end = PREV_INSN (del_first);
658 else if (BARRIER_P (NEXT_INSN (a_end)))
659 del_first = NEXT_INSN (a_end);
661 /* Delete everything marked above as well as crap that might be
662 hanging out between the two blocks. */
663 BB_HEAD (b) = NULL;
664 delete_insn_chain (del_first, del_last, true);
666 /* Reassociate the insns of B with A. */
667 if (!b_empty)
669 update_bb_for_insn_chain (a_end, b_debug_end, a);
671 a_end = b_debug_end;
673 else if (b_end != b_debug_end)
675 /* Move any deleted labels and other notes between the end of A
676 and the debug insns that make up B after the debug insns,
677 bringing the debug insns into A while keeping the notes after
678 the end of A. */
679 if (NEXT_INSN (a_end) != b_debug_start)
680 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
681 b_debug_end);
682 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
683 a_end = b_debug_end;
686 df_bb_delete (b->index);
687 BB_END (a) = a_end;
689 /* If B was a forwarder block, propagate the locus on the edge. */
690 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
691 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
693 if (dump_file)
694 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
698 /* Return true when blocks A and B can be merged. */
700 static bool
701 rtl_can_merge_blocks (basic_block a, basic_block b)
703 /* If we are partitioning hot/cold basic blocks, we don't want to
704 mess up unconditional or indirect jumps that cross between hot
705 and cold sections.
707 Basic block partitioning may result in some jumps that appear to
708 be optimizable (or blocks that appear to be mergeable), but which really
709 must be left untouched (they are required to make it safely across
710 partition boundaries). See the comments at the top of
711 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
713 if (BB_PARTITION (a) != BB_PARTITION (b))
714 return false;
716 /* There must be exactly one edge in between the blocks. */
717 return (single_succ_p (a)
718 && single_succ (a) == b
719 && single_pred_p (b)
720 && a != b
721 /* Must be simple edge. */
722 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
723 && a->next_bb == b
724 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
725 /* If the jump insn has side effects,
726 we can't kill the edge. */
727 && (!JUMP_P (BB_END (a))
728 || (reload_completed
729 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
732 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
733 exist. */
735 rtx
736 block_label (basic_block block)
738 if (block == EXIT_BLOCK_PTR)
739 return NULL_RTX;
741 if (!LABEL_P (BB_HEAD (block)))
743 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
746 return BB_HEAD (block);
749 /* Attempt to perform edge redirection by replacing a possibly complex jump
750 instruction with an unconditional jump, or by removing the jump completely.
751 This applies only if all edges now point to the same block.  The parameters and
752 return values are equivalent to redirect_edge_and_branch. */
754 edge
755 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
757 basic_block src = e->src;
758 rtx insn = BB_END (src), kill_from;
759 rtx set;
760 int fallthru = 0;
762 /* If we are partitioning hot/cold basic blocks, we don't want to
763 mess up unconditional or indirect jumps that cross between hot
764 and cold sections.
766 Basic block partitioning may result in some jumps that appear to
767 be optimizable (or blocks that appear to be mergeable), but which really
768 must be left untouched (they are required to make it safely across
769 partition boundaries). See the comments at the top of
770 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
772 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
773 || BB_PARTITION (src) != BB_PARTITION (target))
774 return NULL;
776 /* We can replace or remove a complex jump only when we have exactly
777 two edges. Also, if we have exactly one outgoing edge, we can
778 redirect that. */
779 if (EDGE_COUNT (src->succs) >= 3
780 /* Verify that all targets will be TARGET. Specifically, the
781 edge that is not E must also go to TARGET. */
782 || (EDGE_COUNT (src->succs) == 2
783 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
784 return NULL;
786 if (!onlyjump_p (insn))
787 return NULL;
788 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
789 return NULL;
791 /* Avoid removing branch with side effects. */
792 set = single_set (insn);
793 if (!set || side_effects_p (set))
794 return NULL;
796 /* In case we zap a conditional jump, we'll need to kill
797 the cc0 setter too. */
798 kill_from = insn;
799 #ifdef HAVE_cc0
800 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
801 && only_sets_cc0_p (PREV_INSN (insn)))
802 kill_from = PREV_INSN (insn);
803 #endif
805 /* See if we can create the fallthru edge. */
806 if (in_cfglayout || can_fallthru (src, target))
808 if (dump_file)
809 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
810 fallthru = 1;
812 /* Selectively unlink whole insn chain. */
813 if (in_cfglayout)
815 rtx insn = src->il.rtl->footer;
817 delete_insn_chain (kill_from, BB_END (src), false);
819 /* Remove barriers but keep jumptables. */
820 while (insn)
822 if (BARRIER_P (insn))
824 if (PREV_INSN (insn))
825 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
826 else
827 src->il.rtl->footer = NEXT_INSN (insn);
828 if (NEXT_INSN (insn))
829 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
831 if (LABEL_P (insn))
832 break;
833 insn = NEXT_INSN (insn);
836 else
837 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
838 false);
841 /* If this already is simplejump, redirect it. */
842 else if (simplejump_p (insn))
844 if (e->dest == target)
845 return NULL;
846 if (dump_file)
847 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
848 INSN_UID (insn), e->dest->index, target->index);
849 if (!redirect_jump (insn, block_label (target), 0))
851 gcc_assert (target == EXIT_BLOCK_PTR);
852 return NULL;
856 /* Cannot do anything for target exit block. */
857 else if (target == EXIT_BLOCK_PTR)
858 return NULL;
860 /* Or replace possibly complicated jump insn by simple jump insn. */
861 else
863 rtx target_label = block_label (target);
864 rtx barrier, label, table;
866 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
867 JUMP_LABEL (BB_END (src)) = target_label;
868 LABEL_NUSES (target_label)++;
869 if (dump_file)
870 fprintf (dump_file, "Replacing insn %i by jump %i\n",
871 INSN_UID (insn), INSN_UID (BB_END (src)));
874 delete_insn_chain (kill_from, insn, false);
876 /* Recognize a tablejump that we are converting to a
877 simple jump and remove its associated CODE_LABEL
878 and ADDR_VEC or ADDR_DIFF_VEC. */
879 if (tablejump_p (insn, &label, &table))
880 delete_insn_chain (label, table, false);
882 barrier = next_nonnote_insn (BB_END (src));
883 if (!barrier || !BARRIER_P (barrier))
884 emit_barrier_after (BB_END (src));
885 else
887 if (barrier != NEXT_INSN (BB_END (src)))
889 /* Move the jump before barrier so that the notes
890 which originally were or were created before jump table are
891 inside the basic block. */
892 rtx new_insn = BB_END (src);
894 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
895 PREV_INSN (barrier), src);
897 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
898 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
900 NEXT_INSN (new_insn) = barrier;
901 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
903 PREV_INSN (new_insn) = PREV_INSN (barrier);
904 PREV_INSN (barrier) = new_insn;
909 /* Keep only one edge out and set proper flags. */
910 if (!single_succ_p (src))
911 remove_edge (e);
912 gcc_assert (single_succ_p (src));
914 e = single_succ_edge (src);
915 if (fallthru)
916 e->flags = EDGE_FALLTHRU;
917 else
918 e->flags = 0;
920 e->probability = REG_BR_PROB_BASE;
921 e->count = src->count;
923 if (e->dest != target)
924 redirect_edge_succ (e, target);
925 return e;
928 /* Subroutine of redirect_branch_edge that tries to patch the jump
929 instruction INSN so that it reaches block NEW_BB.  Do this
930 only when it originally reached OLD_LABEL.  Return true if this
931 worked or the original target wasn't OLD_LABEL; return false if redirection
932 doesn't work. */
934 static bool
935 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
937 rtx tmp;
938 /* Recognize a tablejump and adjust all matching cases. */
939 if (tablejump_p (insn, NULL, &tmp))
941 rtvec vec;
942 int j;
943 rtx new_label = block_label (new_bb);
945 if (new_bb == EXIT_BLOCK_PTR)
946 return false;
947 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
948 vec = XVEC (PATTERN (tmp), 0);
949 else
950 vec = XVEC (PATTERN (tmp), 1);
952 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
953 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
955 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
956 --LABEL_NUSES (old_label);
957 ++LABEL_NUSES (new_label);
960 /* Handle casesi dispatch insns. */
961 if ((tmp = single_set (insn)) != NULL
962 && SET_DEST (tmp) == pc_rtx
963 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
964 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
965 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
967 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
968 new_label);
969 --LABEL_NUSES (old_label);
970 ++LABEL_NUSES (new_label);
973 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
975 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
976 rtx new_label, note;
978 if (new_bb == EXIT_BLOCK_PTR)
979 return false;
980 new_label = block_label (new_bb);
982 for (i = 0; i < n; ++i)
984 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
985 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
986 if (XEXP (old_ref, 0) == old_label)
988 ASM_OPERANDS_LABEL (tmp, i)
989 = gen_rtx_LABEL_REF (Pmode, new_label);
990 --LABEL_NUSES (old_label);
991 ++LABEL_NUSES (new_label);
995 if (JUMP_LABEL (insn) == old_label)
997 JUMP_LABEL (insn) = new_label;
998 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
999 if (note)
1000 remove_note (insn, note);
1002 else
1004 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1005 if (note)
1006 remove_note (insn, note);
1007 if (JUMP_LABEL (insn) != new_label
1008 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1009 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1011 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1012 != NULL_RTX)
1013 XEXP (note, 0) = new_label;
1015 else
1017 /* ?? We may play the games with moving the named labels from
1018 one basic block to the other in case only one computed_jump is
1019 available. */
1020 if (computed_jump_p (insn)
1021 /* A return instruction can't be redirected. */
1022 || returnjump_p (insn))
1023 return false;
1025 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1027 /* If the insn doesn't go where we think, we're confused. */
1028 gcc_assert (JUMP_LABEL (insn) == old_label);
1030 /* If the substitution doesn't succeed, die. This can happen
1031 if the back end emitted unrecognizable instructions or if
1032 the target is the exit block on some arches. */
1033 if (!redirect_jump (insn, block_label (new_bb), 0))
1035 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1036 return false;
1040 return true;
1044 /* Redirect the edge representing a branch of an (un)conditional jump or
1045 tablejump; return NULL on failure. */
1046 static edge
1047 redirect_branch_edge (edge e, basic_block target)
1049 rtx old_label = BB_HEAD (e->dest);
1050 basic_block src = e->src;
1051 rtx insn = BB_END (src);
1053 /* We can only redirect non-fallthru edges of jump insn. */
1054 if (e->flags & EDGE_FALLTHRU)
1055 return NULL;
1056 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1057 return NULL;
1059 if (!currently_expanding_to_rtl)
1061 if (!patch_jump_insn (insn, old_label, target))
1062 return NULL;
1064 else
1065 /* When expanding this BB might actually contain multiple
1066 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1067 Redirect all of those that match our label. */
1068 FOR_BB_INSNS (src, insn)
1069 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1070 return NULL;
1072 if (dump_file)
1073 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1074 e->src->index, e->dest->index, target->index);
1076 if (e->dest != target)
1077 e = redirect_edge_succ_nodup (e, target);
1079 return e;
1082 /* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
1083 expense of adding new instructions or reordering basic blocks.
1085 The function can also be called with the edge destination already equal to TARGET.
1086 It should then try the simplifications and do nothing if none is possible.
1088 Return the edge representing the branch if the transformation succeeded.  Return NULL
1089 on failure.
1090 We still return NULL when E already pointed to TARGET and we did not
1091 manage to simplify the instruction stream. */
1093 static edge
1094 rtl_redirect_edge_and_branch (edge e, basic_block target)
1096 edge ret;
1097 basic_block src = e->src;
1099 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1100 return NULL;
1102 if (e->dest == target)
1103 return e;
1105 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1107 df_set_bb_dirty (src);
1108 return ret;
1111 ret = redirect_branch_edge (e, target);
1112 if (!ret)
1113 return NULL;
1115 df_set_bb_dirty (src);
1116 return ret;
1119 /* Like force_nonfallthru below, but additionally performs redirection.
1120 Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1121 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1122 simple_return_rtx, indicating which kind of returnjump to create.
1123 It should be NULL otherwise. */
1125 basic_block
1126 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1128 basic_block jump_block, new_bb = NULL, src = e->src;
1129 rtx note;
1130 edge new_edge;
1131 int abnormal_edge_flags = 0;
1132 int loc;
1134 /* In case the last instruction is a conditional jump to the next
1135 instruction, first redirect the jump itself and then continue
1136 by creating a new basic block afterwards to redirect the fallthru edge. */
1137 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1138 && any_condjump_p (BB_END (e->src))
1139 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1141 rtx note;
1142 edge b = unchecked_make_edge (e->src, target, 0);
1143 bool redirected;
1145 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1146 gcc_assert (redirected);
1148 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1149 if (note)
1151 int prob = INTVAL (XEXP (note, 0));
1153 b->probability = prob;
1154 b->count = e->count * prob / REG_BR_PROB_BASE;
1155 e->probability -= e->probability;
1156 e->count -= b->count;
1157 if (e->probability < 0)
1158 e->probability = 0;
1159 if (e->count < 0)
1160 e->count = 0;
1164 if (e->flags & EDGE_ABNORMAL)
1166 /* Irritating special case - fallthru edge to the same block as abnormal
1167 edge.
1168 We can't redirect the abnormal edge, but we can still split the fallthru
1169 edge and create a separate abnormal edge to the original destination.
1170 This allows bb-reorder to make such an edge non-fallthru. */
1171 gcc_assert (e->dest == target);
1172 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1173 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1175 else
1177 gcc_assert (e->flags & EDGE_FALLTHRU);
1178 if (e->src == ENTRY_BLOCK_PTR)
1180 /* We can't redirect the entry block. Create an empty block
1181 at the start of the function which we use to add the new
1182 jump. */
1183 edge tmp;
1184 edge_iterator ei;
1185 bool found = false;
1187 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1189 /* Change the existing edge's source to be the new block, and add
1190 a new edge from the entry block to the new block. */
1191 e->src = bb;
1192 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1194 if (tmp == e)
1196 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1197 found = true;
1198 break;
1200 else
1201 ei_next (&ei);
1204 gcc_assert (found);
1206 VEC_safe_push (edge, gc, bb->succs, e);
1207 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1211 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1213 /* Create the new structures. */
1215 /* If the old block ended with a tablejump, skip its table
1216 by searching forward from there. Otherwise start searching
1217 forward from the last instruction of the old block. */
1218 if (!tablejump_p (BB_END (e->src), NULL, &note))
1219 note = BB_END (e->src);
1220 note = NEXT_INSN (note);
1222 jump_block = create_basic_block (note, NULL, e->src);
1223 jump_block->count = e->count;
1224 jump_block->frequency = EDGE_FREQUENCY (e);
1225 jump_block->loop_depth = target->loop_depth;
1227 /* Make sure new block ends up in correct hot/cold section. */
1229 BB_COPY_PARTITION (jump_block, e->src);
1230 if (flag_reorder_blocks_and_partition
1231 && targetm_common.have_named_sections
1232 && JUMP_P (BB_END (jump_block))
1233 && !any_condjump_p (BB_END (jump_block))
1234 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1235 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1237 /* Wire edge in. */
1238 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1239 new_edge->probability = e->probability;
1240 new_edge->count = e->count;
1242 /* Redirect old edge. */
1243 redirect_edge_pred (e, jump_block);
1244 e->probability = REG_BR_PROB_BASE;
1246 new_bb = jump_block;
1248 else
1249 jump_block = e->src;
1251 if (e->goto_locus && e->goto_block == NULL)
1252 loc = e->goto_locus;
1253 else
1254 loc = 0;
1255 e->flags &= ~EDGE_FALLTHRU;
1256 if (target == EXIT_BLOCK_PTR)
1258 if (jump_label == ret_rtx)
1260 #ifdef HAVE_return
1261 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1262 #else
1263 gcc_unreachable ();
1264 #endif
1266 else
1268 gcc_assert (jump_label == simple_return_rtx);
1269 #ifdef HAVE_simple_return
1270 emit_jump_insn_after_setloc (gen_simple_return (),
1271 BB_END (jump_block), loc);
1272 #else
1273 gcc_unreachable ();
1274 #endif
1276 set_return_jump_label (BB_END (jump_block));
1278 else
1280 rtx label = block_label (target);
1281 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1282 JUMP_LABEL (BB_END (jump_block)) = label;
1283 LABEL_NUSES (label)++;
1286 emit_barrier_after (BB_END (jump_block));
1287 redirect_edge_succ_nodup (e, target);
1289 if (abnormal_edge_flags)
1290 make_edge (src, target, abnormal_edge_flags);
1292 df_mark_solutions_dirty ();
1293 return new_bb;
1296 /* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
1297 (and possibly create a new basic block) to make the edge non-fallthru.
1298 Return the newly created BB, or NULL if none was created. */
1300 static basic_block
1301 rtl_force_nonfallthru (edge e)
1303 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1306 /* Redirect edge even at the expense of creating new jump insn or
1307 basic block. Return new basic block if created, NULL otherwise.
1308 Conversion must be possible. */
1310 static basic_block
1311 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1313 if (redirect_edge_and_branch (e, target)
1314 || e->dest == target)
1315 return NULL;
1317 /* In case the edge redirection failed, try to force it to be non-fallthru
1318 and redirect newly created simplejump. */
1319 df_set_bb_dirty (e->src);
1320 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1323 /* The given edge should potentially be a fallthru edge. If that is in
1324 fact true, delete the jump and barriers that are in the way. */
1326 static void
1327 rtl_tidy_fallthru_edge (edge e)
1329 rtx q;
1330 basic_block b = e->src, c = b->next_bb;
1332 /* ??? In a late-running flow pass, other folks may have deleted basic
1333 blocks by nopping out blocks, leaving multiple BARRIERs between here
1334 and the target label. They ought to be chastised and fixed.
1336 We can also wind up with a sequence of undeletable labels between
1337 one block and the next.
1339 So search through a sequence of barriers, labels, and notes for
1340 the head of block C and assert that we really do fall through. */
1342 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1343 if (INSN_P (q))
1344 return;
1346 /* Remove what will soon cease being the jump insn from the source block.
1347 If block B consisted only of this single jump, turn it into a deleted
1348 note. */
1349 q = BB_END (b);
1350 if (JUMP_P (q)
1351 && onlyjump_p (q)
1352 && (any_uncondjump_p (q)
1353 || single_succ_p (b)))
1355 #ifdef HAVE_cc0
1356 /* If this was a conditional jump, we need to also delete
1357 the insn that set cc0. */
1358 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1359 q = PREV_INSN (q);
1360 #endif
1362 q = PREV_INSN (q);
1365 /* Selectively unlink the sequence. */
1366 if (q != PREV_INSN (BB_HEAD (c)))
1367 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1369 e->flags |= EDGE_FALLTHRU;
1372 /* Should move basic block BB after basic block AFTER.  Not implemented yet (NIY). */
1374 static bool
1375 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1376 basic_block after ATTRIBUTE_UNUSED)
1378 return false;
1381 /* Split a (typically critical) edge. Return the new block.
1382 The edge must not be abnormal.
1384 ??? The code generally expects to be called on critical edges.
1385 The case of a block ending in an unconditional jump to a
1386 block with multiple predecessors is not handled optimally. */
1388 static basic_block
1389 rtl_split_edge (edge edge_in)
1391 basic_block bb;
1392 rtx before;
1394 /* Abnormal edges cannot be split. */
1395 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1397 /* We are going to place the new block in front of edge destination.
1398 Avoid existence of fallthru predecessors. */
1399 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1401 edge e = find_fallthru_edge (edge_in->dest->preds);
1403 if (e)
1404 force_nonfallthru (e);
1407 /* Create the basic block note. */
1408 if (edge_in->dest != EXIT_BLOCK_PTR)
1409 before = BB_HEAD (edge_in->dest);
1410 else
1411 before = NULL_RTX;
1413 /* If this is a fall through edge to the exit block, the blocks might not be
1414 adjacent, and the right place is after the source. */
1415 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1417 before = NEXT_INSN (BB_END (edge_in->src));
1418 bb = create_basic_block (before, NULL, edge_in->src);
1419 BB_COPY_PARTITION (bb, edge_in->src);
1421 else
1423 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1424 /* ??? Why not edge_in->dest->prev_bb here? */
1425 BB_COPY_PARTITION (bb, edge_in->dest);
1428 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1430 /* For non-fallthru edges, we must adjust the predecessor's
1431 jump instruction to target our new block. */
1432 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1434 edge redirected = redirect_edge_and_branch (edge_in, bb);
1435 gcc_assert (redirected);
1437 else
1439 if (edge_in->src != ENTRY_BLOCK_PTR)
1441 /* For asm goto even splitting of fallthru edge might
1442 need insn patching, as other labels might point to the
1443 old label. */
1444 rtx last = BB_END (edge_in->src);
1445 if (last
1446 && JUMP_P (last)
1447 && edge_in->dest != EXIT_BLOCK_PTR
1448 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1449 && patch_jump_insn (last, before, bb))
1450 df_set_bb_dirty (edge_in->src);
1452 redirect_edge_succ (edge_in, bb);
1455 return bb;
1458 /* Queue instructions for insertion on an edge between two basic blocks.
1459 The new instructions and basic blocks (if any) will not appear in the
1460 CFG until commit_edge_insertions is called. */
1462 void
1463 insert_insn_on_edge (rtx pattern, edge e)
1465 /* We cannot insert instructions on an abnormal critical edge.
1466 It will be easier to find the culprit if we die now. */
1467 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1469 if (e->insns.r == NULL_RTX)
1470 start_sequence ();
1471 else
1472 push_to_sequence (e->insns.r);
1474 emit_insn (pattern);
1476 e->insns.r = get_insns ();
1477 end_sequence ();
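/* A typical (illustrative) use of insert_insn_on_edge, sketched here for
   clarity rather than taken from any particular caller: build a sequence,
   queue it on an edge, and flush all queued sequences once the pass is done.
   REG and E stand for a caller-provided register and edge.

       rtx seq;
       start_sequence ();
       emit_move_insn (reg, const0_rtx);
       seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);
       ...
       commit_edge_insertions ();
*/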
1480 /* Update the CFG for the instructions queued on edge E. */
1482 void
1483 commit_one_edge_insertion (edge e)
1485 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1486 basic_block bb;
1488 /* Pull the insns off the edge now since the edge might go away. */
1489 insns = e->insns.r;
1490 e->insns.r = NULL_RTX;
1492 /* Figure out where to put these insns. If the destination has
1493 one predecessor, insert there. Except for the exit block. */
1494 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1496 bb = e->dest;
1498 /* Get the location correct wrt a code label, and "nice" wrt
1499 a basic block note, and before everything else. */
1500 tmp = BB_HEAD (bb);
1501 if (LABEL_P (tmp))
1502 tmp = NEXT_INSN (tmp);
1503 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1504 tmp = NEXT_INSN (tmp);
1505 if (tmp == BB_HEAD (bb))
1506 before = tmp;
1507 else if (tmp)
1508 after = PREV_INSN (tmp);
1509 else
1510 after = get_last_insn ();
1513 /* If the source has one successor and the edge is not abnormal,
1514 insert there. Except for the entry block. */
1515 else if ((e->flags & EDGE_ABNORMAL) == 0
1516 && single_succ_p (e->src)
1517 && e->src != ENTRY_BLOCK_PTR)
1519 bb = e->src;
1521 /* It is possible to have a non-simple jump here. Consider a target
1522 where some forms of unconditional jumps clobber a register. This
1523 happens on the fr30 for example.
1525 We know this block has a single successor, so we can just emit
1526 the queued insns before the jump. */
1527 if (JUMP_P (BB_END (bb)))
1528 before = BB_END (bb);
1529 else
1531 /* We'd better be fallthru, or we've lost track of what's what. */
1532 gcc_assert (e->flags & EDGE_FALLTHRU);
1534 after = BB_END (bb);
1538 /* Otherwise we must split the edge. */
1539 else
1541 bb = split_edge (e);
1542 after = BB_END (bb);
1544 if (flag_reorder_blocks_and_partition
1545 && targetm_common.have_named_sections
1546 && e->src != ENTRY_BLOCK_PTR
1547 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1548 && !(e->flags & EDGE_CROSSING)
1549 && JUMP_P (after)
1550 && !any_condjump_p (after)
1551 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1552 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1555 /* Now that we've found the spot, do the insertion. */
1556 if (before)
1558 emit_insn_before_noloc (insns, before, bb);
1559 last = prev_nonnote_insn (before);
1561 else
1562 last = emit_insn_after_noloc (insns, after, bb);
1564 if (returnjump_p (last))
1566 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1567 This is not currently a problem because this only happens
1568 for the (single) epilogue, which already has a fallthru edge
1569 to EXIT. */
1571 e = single_succ_edge (bb);
1572 gcc_assert (e->dest == EXIT_BLOCK_PTR
1573 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1575 e->flags &= ~EDGE_FALLTHRU;
1576 emit_barrier_after (last);
1578 if (before)
1579 delete_insn (before);
1581 else
1582 gcc_assert (!JUMP_P (last));
1585 /* Update the CFG for all queued instructions. */
1587 void
1588 commit_edge_insertions (void)
1590 basic_block bb;
1592 #ifdef ENABLE_CHECKING
1593 verify_flow_info ();
1594 #endif
1596 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1598 edge e;
1599 edge_iterator ei;
1601 FOR_EACH_EDGE (e, ei, bb->succs)
1602 if (e->insns.r)
1603 commit_one_edge_insertion (e);
1608 /* Print out RTL-specific basic block information (live information
1609 at start and end). */
1611 static void
1612 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1614 rtx insn;
1615 rtx last;
1616 char *s_indent;
1618 s_indent = (char *) alloca ((size_t) indent + 1);
1619 memset (s_indent, ' ', (size_t) indent);
1620 s_indent[indent] = '\0';
1622 if (df)
1624 df_dump_top (bb, outf);
1625 putc ('\n', outf);
1628 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1629 insn = NEXT_INSN (insn))
1630 print_rtl_single (outf, insn);
1632 if (df)
1634 df_dump_bottom (bb, outf);
1635 putc ('\n', outf);
1640 /* Like print_rtl, but also print out live information for the start of each
1641 basic block. */
1643 void
1644 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1646 const_rtx tmp_rtx;
1647 if (rtx_first == 0)
1648 fprintf (outf, "(nil)\n");
1649 else
1651 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1652 int max_uid = get_max_uid ();
1653 basic_block *start = XCNEWVEC (basic_block, max_uid);
1654 basic_block *end = XCNEWVEC (basic_block, max_uid);
1655 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1657 basic_block bb;
1659 if (df)
1660 df_dump_start (outf);
1662 FOR_EACH_BB_REVERSE (bb)
1664 rtx x;
1666 start[INSN_UID (BB_HEAD (bb))] = bb;
1667 end[INSN_UID (BB_END (bb))] = bb;
1668 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1670 enum bb_state state = IN_MULTIPLE_BB;
1672 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1673 state = IN_ONE_BB;
1674 in_bb_p[INSN_UID (x)] = state;
1676 if (x == BB_END (bb))
1677 break;
1681 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1683 int did_output;
1685 bb = start[INSN_UID (tmp_rtx)];
1686 if (bb != NULL)
1687 dump_bb_info (bb, true, false, dump_flags, ";; ", outf);
1689 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1690 && !NOTE_P (tmp_rtx)
1691 && !BARRIER_P (tmp_rtx))
1692 fprintf (outf, ";; Insn is not within a basic block\n");
1693 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1694 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1696 did_output = print_rtl_single (outf, tmp_rtx);
1698 bb = end[INSN_UID (tmp_rtx)];
1699 if (bb != NULL)
1700 dump_bb_info (bb, false, true, dump_flags, ";; ", outf);
1701 if (did_output)
1702 putc ('\n', outf);
1705 free (start);
1706 free (end);
1707 free (in_bb_p);
1710 if (crtl->epilogue_delay_list != 0)
1712 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1713 for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
1714 tmp_rtx = XEXP (tmp_rtx, 1))
1715 print_rtl_single (outf, XEXP (tmp_rtx, 0));
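/* Keep the REG_BR_PROB note on the jump ending BB in sync with the
   probability recorded on BRANCH_EDGE (BB), if the two differ.  */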
1719 void
1720 update_br_prob_note (basic_block bb)
1722 rtx note;
1723 if (!JUMP_P (BB_END (bb)))
1724 return;
1725 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1726 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1727 return;
1728 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1731 /* Get the last insn associated with block BB (that includes barriers and
1732 tablejumps after BB). */
1733 rtx
1734 get_last_bb_insn (basic_block bb)
1736 rtx tmp;
1737 rtx end = BB_END (bb);
1739 /* Include any jump table following the basic block. */
1740 if (tablejump_p (end, NULL, &tmp))
1741 end = tmp;
1743 /* Include any barriers that may follow the basic block. */
1744 tmp = next_nonnote_insn_bb (end);
1745 while (tmp && BARRIER_P (tmp))
1747 end = tmp;
1748 tmp = next_nonnote_insn_bb (end);
1751 return end;
1754 /* Verify the CFG and RTL consistency common for both underlying RTL and
1755 cfglayout RTL.
1757 Currently it does the following checks:
1759 - overlapping of basic blocks
1760 - insns with wrong BLOCK_FOR_INSN pointers
1761 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1762 - tails of basic blocks (ensure that boundary is necessary)
1763 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1764 and NOTE_INSN_BASIC_BLOCK
1765 - verify that no fall_thru edge crosses hot/cold partition boundaries
1766 - verify that there are no pending RTL branch predictions
1768 In the future it can be extended to check a lot of other things as well
1769 (reachability of basic blocks, liveness information, etc.). */
1771 static int
1772 rtl_verify_flow_info_1 (void)
1774 rtx x;
1775 int err = 0;
1776 basic_block bb;
1778 /* Check the general integrity of the basic blocks. */
1779 FOR_EACH_BB_REVERSE (bb)
1781 rtx insn;
1783 if (!(bb->flags & BB_RTL))
1785 error ("BB_RTL flag not set for block %d", bb->index);
1786 err = 1;
1789 FOR_BB_INSNS (bb, insn)
1790 if (BLOCK_FOR_INSN (insn) != bb)
1792 error ("insn %d basic block pointer is %d, should be %d",
1793 INSN_UID (insn),
1794 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1795 bb->index);
1796 err = 1;
1799 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1800 if (!BARRIER_P (insn)
1801 && BLOCK_FOR_INSN (insn) != NULL)
1803 error ("insn %d in header of bb %d has non-NULL basic block",
1804 INSN_UID (insn), bb->index);
1805 err = 1;
1807 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1808 if (!BARRIER_P (insn)
1809 && BLOCK_FOR_INSN (insn) != NULL)
1811 error ("insn %d in footer of bb %d has non-NULL basic block",
1812 INSN_UID (insn), bb->index);
1813 err = 1;
1817 /* Now check the basic blocks (boundaries etc.) */
1818 FOR_EACH_BB_REVERSE (bb)
1820 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1821 edge e, fallthru = NULL;
1822 rtx note;
1823 edge_iterator ei;
1825 if (JUMP_P (BB_END (bb))
1826 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1827 && EDGE_COUNT (bb->succs) >= 2
1828 && any_condjump_p (BB_END (bb)))
1830 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1831 && profile_status != PROFILE_ABSENT)
1833 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1834 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1835 err = 1;
1838 FOR_EACH_EDGE (e, ei, bb->succs)
1840 bool is_crossing;
1842 if (e->flags & EDGE_FALLTHRU)
1843 n_fallthru++, fallthru = e;
1845 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1846 && e->src != ENTRY_BLOCK_PTR
1847 && e->dest != EXIT_BLOCK_PTR);
1848 if (e->flags & EDGE_CROSSING)
1850 if (!is_crossing)
1852 error ("EDGE_CROSSING incorrectly set across same section");
1853 err = 1;
1855 if (e->flags & EDGE_FALLTHRU)
1857 error ("fallthru edge crosses section boundary (bb %i)",
1858 e->src->index);
1859 err = 1;
1861 if (e->flags & EDGE_EH)
1863 error ("EH edge crosses section boundary (bb %i)",
1864 e->src->index);
1865 err = 1;
1868 else if (is_crossing)
1870 error ("EDGE_CROSSING missing across section boundary");
1871 err = 1;
1874 if ((e->flags & ~(EDGE_DFS_BACK
1875 | EDGE_CAN_FALLTHRU
1876 | EDGE_IRREDUCIBLE_LOOP
1877 | EDGE_LOOP_EXIT
1878 | EDGE_CROSSING)) == 0)
1879 n_branch++;
1881 if (e->flags & EDGE_ABNORMAL_CALL)
1882 n_call++;
1884 if (e->flags & EDGE_EH)
1885 n_eh++;
1886 else if (e->flags & EDGE_ABNORMAL)
1887 n_abnormal++;
1890 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1892 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1893 err = 1;
1895 if (n_eh > 1)
1897 error ("too many eh edges %i", bb->index);
1898 err = 1;
1900 if (n_branch
1901 && (!JUMP_P (BB_END (bb))
1902 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1903 || any_condjump_p (BB_END (bb))))))
1905 error ("too many outgoing branch edges from bb %i", bb->index);
1906 err = 1;
1908 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1910 error ("fallthru edge after unconditional jump %i", bb->index);
1911 err = 1;
1913 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1915 error ("wrong number of branch edges after unconditional jump %i",
1916 bb->index);
1917 err = 1;
1919 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1920 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1922 error ("wrong amount of branch edges after conditional jump %i",
1923 bb->index);
1924 err = 1;
1926 if (n_call && !CALL_P (BB_END (bb)))
1928 error ("call edges for non-call insn in bb %i", bb->index);
1929 err = 1;
1931 if (n_abnormal
1932 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1933 && (!JUMP_P (BB_END (bb))
1934 || any_condjump_p (BB_END (bb))
1935 || any_uncondjump_p (BB_END (bb))))
1937 error ("abnormal edges for no purpose in bb %i", bb->index);
1938 err = 1;
1941 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1942 /* We may have a barrier inside a basic block before dead code
1943 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1944 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1946 debug_rtx (x);
1947 if (! BLOCK_FOR_INSN (x))
1948 error
1949 ("insn %d inside basic block %d but block_for_insn is NULL",
1950 INSN_UID (x), bb->index);
1951 else
1952 error
1953 ("insn %d inside basic block %d but block_for_insn is %i",
1954 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1956 err = 1;
1959 /* OK, the pointers are correct.  Now check the header of the basic
1960 block.  It ought to contain an optional CODE_LABEL followed
1961 by a NOTE_INSN_BASIC_BLOCK note. */
1962 x = BB_HEAD (bb);
1963 if (LABEL_P (x))
1965 if (BB_END (bb) == x)
1967 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1968 bb->index);
1969 err = 1;
1972 x = NEXT_INSN (x);
1975 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1977 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1978 bb->index);
1979 err = 1;
1982 if (BB_END (bb) == x)
1983 /* Do checks for empty blocks here. */
1985 else
1986 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1988 if (NOTE_INSN_BASIC_BLOCK_P (x))
1990 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1991 INSN_UID (x), bb->index);
1992 err = 1;
1995 if (x == BB_END (bb))
1996 break;
1998 if (control_flow_insn_p (x))
2000 error ("in basic block %d:", bb->index);
2001 fatal_insn ("flow control insn inside a basic block", x);
2006 /* Clean up. */
2007 return err;
2010 /* Verify the CFG and RTL consistency common for both underlying RTL and
2011 cfglayout RTL.
2013 Currently it does the following checks:
2014 - all checks of rtl_verify_flow_info_1
2015 - test head/end pointers
2016 - check that all insns are in the basic blocks
2017 (except the switch handling code, barriers and notes)
2018 - check that all returns are followed by barriers
2019 - check that all fallthru edges point to the adjacent blocks. */
2021 static int
2022 rtl_verify_flow_info (void)
2024 basic_block bb;
2025 int err = rtl_verify_flow_info_1 ();
2026 rtx x;
2027 rtx last_head = get_last_insn ();
2028 basic_block *bb_info;
2029 int num_bb_notes;
2030 const rtx rtx_first = get_insns ();
2031 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2032 const int max_uid = get_max_uid ();
2034 bb_info = XCNEWVEC (basic_block, max_uid);
2036 FOR_EACH_BB_REVERSE (bb)
2038 edge e;
2039 rtx head = BB_HEAD (bb);
2040 rtx end = BB_END (bb);
2042 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2044 /* Verify the end of the basic block is in the INSN chain. */
2045 if (x == end)
2046 break;
2048 /* And that the code outside of basic blocks has NULL bb field. */
2049 if (!BARRIER_P (x)
2050 && BLOCK_FOR_INSN (x) != NULL)
2052 error ("insn %d outside of basic blocks has non-NULL bb field",
2053 INSN_UID (x));
2054 err = 1;
2058 if (!x)
2060 error ("end insn %d for block %d not found in the insn stream",
2061 INSN_UID (end), bb->index);
2062 err = 1;
2065 /* Work backwards from the end to the head of the basic block
2066 to verify the head is in the RTL chain. */
2067 for (; x != NULL_RTX; x = PREV_INSN (x))
2069 /* While walking over the insn chain, verify insns appear
2070 in only one basic block. */
2071 if (bb_info[INSN_UID (x)] != NULL)
2073 error ("insn %d is in multiple basic blocks (%d and %d)",
2074 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2075 err = 1;
2078 bb_info[INSN_UID (x)] = bb;
2080 if (x == head)
2081 break;
2083 if (!x)
2085 error ("head insn %d for block %d not found in the insn stream",
2086 INSN_UID (head), bb->index);
2087 err = 1;
2090 last_head = PREV_INSN (x);
2092 e = find_fallthru_edge (bb->succs);
2093 if (!e)
2095 rtx insn;
2097 /* Ensure existence of barrier in BB with no fallthru edges. */
2098 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2100 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2102 error ("missing barrier after block %i", bb->index);
2103 err = 1;
2104 break;
2106 if (BARRIER_P (insn))
2107 break;
2110 else if (e->src != ENTRY_BLOCK_PTR
2111 && e->dest != EXIT_BLOCK_PTR)
2113 rtx insn;
2115 if (e->src->next_bb != e->dest)
2117 error
2118 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2119 e->src->index, e->dest->index);
2120 err = 1;
2122 else
2123 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2124 insn = NEXT_INSN (insn))
2125 if (BARRIER_P (insn) || INSN_P (insn))
2127 error ("verify_flow_info: Incorrect fallthru %i->%i",
2128 e->src->index, e->dest->index);
2129 fatal_insn ("wrong insn in the fallthru edge", insn);
2130 err = 1;
2135 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2137 /* Check that the code before the first basic block has NULL
2138 bb field. */
2139 if (!BARRIER_P (x)
2140 && BLOCK_FOR_INSN (x) != NULL)
2142 error ("insn %d outside of basic blocks has non-NULL bb field",
2143 INSN_UID (x));
2144 err = 1;
2147 free (bb_info);
2149 num_bb_notes = 0;
2150 last_bb_seen = ENTRY_BLOCK_PTR;
2152 for (x = rtx_first; x; x = NEXT_INSN (x))
2154 if (NOTE_INSN_BASIC_BLOCK_P (x))
2156 bb = NOTE_BASIC_BLOCK (x);
2158 num_bb_notes++;
2159 if (bb != last_bb_seen->next_bb)
2160 internal_error ("basic blocks not laid down consecutively");
2162 curr_bb = last_bb_seen = bb;
2165 if (!curr_bb)
2167 switch (GET_CODE (x))
2169 case BARRIER:
2170 case NOTE:
2171 break;
2173 case CODE_LABEL:
2174 /* An addr_vec is placed outside any basic block. */
2175 if (NEXT_INSN (x)
2176 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2177 x = NEXT_INSN (x);
2179 /* But in any case, non-deletable labels can appear anywhere. */
2180 break;
2182 default:
2183 fatal_insn ("insn outside basic block", x);
2187 if (JUMP_P (x)
2188 && returnjump_p (x) && ! condjump_p (x)
2189 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2190 fatal_insn ("return not followed by barrier", x);
2191 if (curr_bb && x == BB_END (curr_bb))
2192 curr_bb = NULL;
2195 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2196 internal_error
2197 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2198 num_bb_notes, n_basic_blocks);
2200 return err;
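/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): the verifier above is normally reached through
   the generic verify_flow_info hook wrapper, usually only with checking
   enabled. The example_* name is made up. */
#if 0
static void
example_check_rtl_cfg (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif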
2203 /* Assume that the preceding pass has possibly eliminated jump instructions
2204 or converted the unconditional jumps. Eliminate the dead edges from the CFG.
2205 Return true if any edges were eliminated. */
2207 bool
2208 purge_dead_edges (basic_block bb)
2210 edge e;
2211 rtx insn = BB_END (bb), note;
2212 bool purged = false;
2213 bool found;
2214 edge_iterator ei;
2216 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2218 insn = PREV_INSN (insn);
2219 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2221 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2222 if (NONJUMP_INSN_P (insn)
2223 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2225 rtx eqnote;
2227 if (! may_trap_p (PATTERN (insn))
2228 || ((eqnote = find_reg_equal_equiv_note (insn))
2229 && ! may_trap_p (XEXP (eqnote, 0))))
2230 remove_note (insn, note);
2233 /* Clean up abnormal edges caused by exceptions or non-local gotos. */
2234 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2236 bool remove = false;
2238 /* There are three types of edges we need to handle correctly here: EH
2239 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2240 last of these can appear when nonlocal gotos are used. */
2241 if (e->flags & EDGE_ABNORMAL_CALL)
2243 if (!CALL_P (insn))
2244 remove = true;
2245 else if (can_nonlocal_goto (insn))
2247 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2249 else
2250 remove = true;
2252 else if (e->flags & EDGE_EH)
2253 remove = !can_throw_internal (insn);
2255 if (remove)
2257 remove_edge (e);
2258 df_set_bb_dirty (bb);
2259 purged = true;
2261 else
2262 ei_next (&ei);
2265 if (JUMP_P (insn))
2267 rtx note;
2268 edge b,f;
2269 edge_iterator ei;
2271 /* We only care about conditional jumps, return jumps and simplejumps. */
2272 if (!any_condjump_p (insn)
2273 && !returnjump_p (insn)
2274 && !simplejump_p (insn))
2275 return purged;
2277 /* Branch probability/prediction notes are defined only for
2278 condjumps. We've possibly turned the condjump into a simplejump. */
2279 if (simplejump_p (insn))
2281 note = find_reg_note (insn, REG_BR_PROB, NULL);
2282 if (note)
2283 remove_note (insn, note);
2284 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2285 remove_note (insn, note);
2288 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2290 /* Avoid letting abnormal flags leak from computed jumps turned
2291 into simplejumps. */
2293 e->flags &= ~EDGE_ABNORMAL;
2295 /* See if this edge is one we should keep. */
2296 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2297 /* A conditional jump can fall through into the next
2298 block, so we should keep the edge. */
2300 ei_next (&ei);
2301 continue;
2303 else if (e->dest != EXIT_BLOCK_PTR
2304 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2305 /* If the destination block is the target of the jump,
2306 keep the edge. */
2308 ei_next (&ei);
2309 continue;
2311 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2312 /* If the destination block is the exit block, and this
2313 instruction is a return, then keep the edge. */
2315 ei_next (&ei);
2316 continue;
2318 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2319 /* Keep the edges that correspond to exceptions thrown by
2320 this instruction and rematerialize the EDGE_ABNORMAL
2321 flag we just cleared above. */
2323 e->flags |= EDGE_ABNORMAL;
2324 ei_next (&ei);
2325 continue;
2328 /* We do not need this edge. */
2329 df_set_bb_dirty (bb);
2330 purged = true;
2331 remove_edge (e);
2334 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2335 return purged;
2337 if (dump_file)
2338 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2340 if (!optimize)
2341 return purged;
2343 /* Redistribute probabilities. */
2344 if (single_succ_p (bb))
2346 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2347 single_succ_edge (bb)->count = bb->count;
2349 else
2351 note = find_reg_note (insn, REG_BR_PROB, NULL);
2352 if (!note)
2353 return purged;
2355 b = BRANCH_EDGE (bb);
2356 f = FALLTHRU_EDGE (bb);
2357 b->probability = INTVAL (XEXP (note, 0));
2358 f->probability = REG_BR_PROB_BASE - b->probability;
2359 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2360 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2363 return purged;
2365 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2367 /* First, there should not be any EH or ABCALL edges resulting
2368 from non-local gotos and the like. If there were, we shouldn't
2369 have created the sibcall in the first place. Second, there
2370 should of course never have been a fallthru edge. */
2371 gcc_assert (single_succ_p (bb));
2372 gcc_assert (single_succ_edge (bb)->flags
2373 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2375 return 0;
2378 /* If we don't see a jump insn, we don't know exactly why the block would
2379 have been broken at this point. Look for a simple, non-fallthru edge,
2380 as these are only created by conditional branches. If we find such an
2381 edge we know that there used to be a jump here and can then safely
2382 remove all non-fallthru edges. */
2383 found = false;
2384 FOR_EACH_EDGE (e, ei, bb->succs)
2385 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2387 found = true;
2388 break;
2391 if (!found)
2392 return purged;
2394 /* Remove all but the fake and fallthru edges. The fake edge may be
2395 the only successor for this block in the case of noreturn
2396 calls. */
2397 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2399 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2401 df_set_bb_dirty (bb);
2402 remove_edge (e);
2403 purged = true;
2405 else
2406 ei_next (&ei);
2409 gcc_assert (single_succ_p (bb));
2411 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2412 single_succ_edge (bb)->count = bb->count;
2414 if (dump_file)
2415 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2416 bb->index);
2417 return purged;
2420 /* Search all basic blocks for potentially dead edges and purge them. Return
2421 true if some edge has been eliminated. */
2423 bool
2424 purge_all_dead_edges (void)
2426 int purged = false;
2427 basic_block bb;
2429 FOR_EACH_BB (bb)
2431 bool purged_here = purge_dead_edges (bb);
2433 purged |= purged_here;
2436 return purged;
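/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): a late pass that has simplified or deleted jumps
   would typically purge the now-dead edges and then let the generic CFG
   cleanup remove any blocks that became unreachable. The example_* name is
   made up. */
#if 0
static void
example_purge_after_simplification (void)
{
  if (purge_all_dead_edges ())
    cleanup_cfg (0);
}
#endif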
2439 /* This is used by a few passes that emit some instructions after abnormal
2440 calls, moving the basic block's end, while they in fact do want to emit
2441 them on the fallthru edge. Look for abnormal call edges, find the call
2442 backward in the block, and insert the instructions on the edge instead.
2444 Similarly, handle instructions throwing exceptions internally.
2446 Return true when instructions have been found and inserted on edges. */
2448 bool
2449 fixup_abnormal_edges (void)
2451 bool inserted = false;
2452 basic_block bb;
2454 FOR_EACH_BB (bb)
2456 edge e;
2457 edge_iterator ei;
2459 /* Look for cases we are interested in - calls or instructions causing
2460 exceptions. */
2461 FOR_EACH_EDGE (e, ei, bb->succs)
2462 if ((e->flags & EDGE_ABNORMAL_CALL)
2463 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2464 == (EDGE_ABNORMAL | EDGE_EH)))
2465 break;
2467 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2469 rtx insn;
2471 /* Get past the new insns generated. Allow notes, as the insns
2472 may already be deleted. */
2473 insn = BB_END (bb);
2474 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2475 && !can_throw_internal (insn)
2476 && insn != BB_HEAD (bb))
2477 insn = PREV_INSN (insn);
2479 if (CALL_P (insn) || can_throw_internal (insn))
2481 rtx stop, next;
2483 e = find_fallthru_edge (bb->succs);
2485 stop = NEXT_INSN (BB_END (bb));
2486 BB_END (bb) = insn;
2488 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2490 next = NEXT_INSN (insn);
2491 if (INSN_P (insn))
2493 delete_insn (insn);
2495 /* Sometimes there's still the return value USE.
2496 If it's placed after a trapping call (i.e. that
2497 call is the last insn anyway), we have no fallthru
2498 edge. Simply delete this use and don't try to insert
2499 on the non-existent edge. */
2500 if (GET_CODE (PATTERN (insn)) != USE)
2502 /* We're not deleting it, we're moving it. */
2503 INSN_DELETED_P (insn) = 0;
2504 PREV_INSN (insn) = NULL_RTX;
2505 NEXT_INSN (insn) = NULL_RTX;
2507 insert_insn_on_edge (insn, e);
2508 inserted = true;
2511 else if (!BARRIER_P (insn))
2512 set_block_for_insn (insn, NULL);
2516 /* It may be that we don't find any trapping insn. In this
2517 case we discovered quite late that the insn that had been
2518 marked as can_throw_internal in fact couldn't trap at all.
2519 So we should delete the EH edges out of the block. */
2520 else
2521 purge_dead_edges (bb);
2525 return inserted;
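/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): a pass that emitted insns after abnormal calls
   lets fixup_abnormal_edges move them onto the fallthru edges, then
   materializes the pending edge insertions. The example_* name is made up. */
#if 0
static void
example_commit_abnormal_fixups (void)
{
  if (fixup_abnormal_edges ())
    commit_edge_insertions ();
}
#endif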
2528 /* Same as split_block but update cfg_layout structures. */
2530 static basic_block
2531 cfg_layout_split_block (basic_block bb, void *insnp)
2533 rtx insn = (rtx) insnp;
2534 basic_block new_bb = rtl_split_block (bb, insn);
2536 new_bb->il.rtl->footer = bb->il.rtl->footer;
2537 bb->il.rtl->footer = NULL;
2539 return new_bb;
2542 /* Redirect edge E to DEST. */
2543 static edge
2544 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2546 basic_block src = e->src;
2547 edge ret;
2549 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2550 return NULL;
2552 if (e->dest == dest)
2553 return e;
2555 if (e->src != ENTRY_BLOCK_PTR
2556 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2558 df_set_bb_dirty (src);
2559 return ret;
2562 if (e->src == ENTRY_BLOCK_PTR
2563 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2565 if (dump_file)
2566 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2567 e->src->index, dest->index);
2569 df_set_bb_dirty (e->src);
2570 redirect_edge_succ (e, dest);
2571 return e;
2574 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2575 edge when the basic blocks appear to be in sequence. Avoid this
2576 transformation. */
2578 if (e->flags & EDGE_FALLTHRU)
2580 /* Redirect any branch edges unified with the fallthru one. */
2581 if (JUMP_P (BB_END (src))
2582 && label_is_jump_target_p (BB_HEAD (e->dest),
2583 BB_END (src)))
2585 edge redirected;
2587 if (dump_file)
2588 fprintf (dump_file, "Fallthru edge unified with branch "
2589 "%i->%i redirected to %i\n",
2590 e->src->index, e->dest->index, dest->index);
2591 e->flags &= ~EDGE_FALLTHRU;
2592 redirected = redirect_branch_edge (e, dest);
2593 gcc_assert (redirected);
2594 redirected->flags |= EDGE_FALLTHRU;
2595 df_set_bb_dirty (redirected->src);
2596 return redirected;
2598 /* In case we are redirecting the fallthru edge to the branch edge
2599 of a conditional jump, remove the jump. */
2600 if (EDGE_COUNT (src->succs) == 2)
2602 /* Find the edge that is different from E. */
2603 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2605 if (s->dest == dest
2606 && any_condjump_p (BB_END (src))
2607 && onlyjump_p (BB_END (src)))
2608 delete_insn (BB_END (src));
2610 if (dump_file)
2611 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
2612 e->src->index, e->dest->index, dest->index);
2613 ret = redirect_edge_succ_nodup (e, dest);
2615 else
2616 ret = redirect_branch_edge (e, dest);
2618 /* We don't want simplejumps in the insn stream during cfglayout. */
2619 gcc_assert (!simplejump_p (BB_END (src)));
2621 df_set_bb_dirty (src);
2622 return ret;
2625 /* Simple wrapper, as we can always redirect fallthru edges. */
2626 static basic_block
2627 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2629 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2631 gcc_assert (redirected);
2632 return NULL;
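/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): in cfglayout mode, passes reach the two
   redirectors above through the cfghooks wrappers; a direct pairing would
   look roughly like this. The example_* name is made up. */
#if 0
static void
example_redirect_in_cfglayout (edge e, basic_block dest)
{
  if (!cfg_layout_redirect_edge_and_branch (e, dest))
    cfg_layout_redirect_edge_and_branch_force (e, dest);
}
#endif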
2635 /* Same as delete_basic_block but update cfg_layout structures. */
2637 static void
2638 cfg_layout_delete_block (basic_block bb)
2640 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
2642 if (bb->il.rtl->header)
2644 next = BB_HEAD (bb);
2645 if (prev)
2646 NEXT_INSN (prev) = bb->il.rtl->header;
2647 else
2648 set_first_insn (bb->il.rtl->header);
2649 PREV_INSN (bb->il.rtl->header) = prev;
2650 insn = bb->il.rtl->header;
2651 while (NEXT_INSN (insn))
2652 insn = NEXT_INSN (insn);
2653 NEXT_INSN (insn) = next;
2654 PREV_INSN (next) = insn;
2656 next = NEXT_INSN (BB_END (bb));
2657 if (bb->il.rtl->footer)
2659 insn = bb->il.rtl->footer;
2660 while (insn)
2662 if (BARRIER_P (insn))
2664 if (PREV_INSN (insn))
2665 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2666 else
2667 bb->il.rtl->footer = NEXT_INSN (insn);
2668 if (NEXT_INSN (insn))
2669 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2671 if (LABEL_P (insn))
2672 break;
2673 insn = NEXT_INSN (insn);
2675 if (bb->il.rtl->footer)
2677 insn = BB_END (bb);
2678 NEXT_INSN (insn) = bb->il.rtl->footer;
2679 PREV_INSN (bb->il.rtl->footer) = insn;
2680 while (NEXT_INSN (insn))
2681 insn = NEXT_INSN (insn);
2682 NEXT_INSN (insn) = next;
2683 if (next)
2684 PREV_INSN (next) = insn;
2685 else
2686 set_last_insn (insn);
2689 if (bb->next_bb != EXIT_BLOCK_PTR)
2690 to = &bb->next_bb->il.rtl->header;
2691 else
2692 to = &cfg_layout_function_footer;
2694 rtl_delete_block (bb);
2696 if (prev)
2697 prev = NEXT_INSN (prev);
2698 else
2699 prev = get_insns ();
2700 if (next)
2701 next = PREV_INSN (next);
2702 else
2703 next = get_last_insn ();
2705 if (next && NEXT_INSN (next) != prev)
2707 remaints = unlink_insn_chain (prev, next);
2708 insn = remaints;
2709 while (NEXT_INSN (insn))
2710 insn = NEXT_INSN (insn);
2711 NEXT_INSN (insn) = *to;
2712 if (*to)
2713 PREV_INSN (*to) = insn;
2714 *to = remaints;
2718 /* Return true when blocks A and B can be safely merged. */
2720 static bool
2721 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2723 /* If we are partitioning hot/cold basic blocks, we don't want to
2724 mess up unconditional or indirect jumps that cross between hot
2725 and cold sections.
2727 Basic block partitioning may result in some jumps that appear to
2728 be optimizable (or blocks that appear to be mergeable), but which really
2729 must be left untouched (they are required to make it safely across
2730 partition boundaries). See the comments at the top of
2731 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2733 if (BB_PARTITION (a) != BB_PARTITION (b))
2734 return false;
2736 /* There must be exactly one edge in between the blocks. */
2737 return (single_succ_p (a)
2738 && single_succ (a) == b
2739 && single_pred_p (b) == 1
2740 && a != b
2741 /* Must be simple edge. */
2742 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2743 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2744 /* If the jump insn has side effects, we can't kill the edge.
2745 When not optimizing, try_redirect_by_replacing_jump will
2746 not allow us to redirect an edge by replacing a table jump. */
2747 && (!JUMP_P (BB_END (a))
2748 || ((!optimize || reload_completed)
2749 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2752 /* Merge blocks A and B. The blocks must be mergeable. */
2754 static void
2755 cfg_layout_merge_blocks (basic_block a, basic_block b)
2757 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
2759 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
2761 if (dump_file)
2762 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
2763 a->index);
2765 /* If there was a CODE_LABEL beginning B, delete it. */
2766 if (LABEL_P (BB_HEAD (b)))
2768 delete_insn (BB_HEAD (b));
2771 /* There should be a fallthru edge in A; if not, do a dummy redirection
2772 to get it cleaned up. */
2773 if (JUMP_P (BB_END (a)))
2774 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2775 gcc_assert (!JUMP_P (BB_END (a)));
2777 /* When not optimizing and the edge is the only place in RTL which holds
2778 some unique locus, emit a nop with that locus in between. */
2779 if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
2781 rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
2782 int goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2784 while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
2785 insn = PREV_INSN (insn);
2786 if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
2787 goto_locus = 0;
2788 else
2790 insn = BB_HEAD (b);
2791 end = NEXT_INSN (BB_END (b));
2792 while (insn != end && !INSN_P (insn))
2793 insn = NEXT_INSN (insn);
2794 if (insn != end && INSN_LOCATOR (insn) != 0
2795 && locator_eq (INSN_LOCATOR (insn), goto_locus))
2796 goto_locus = 0;
2798 if (goto_locus)
2800 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
2801 INSN_LOCATOR (BB_END (a)) = goto_locus;
2805 /* Possible line number notes should appear in between. */
2806 if (b->il.rtl->header)
2808 rtx first = BB_END (a), last;
2810 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2811 delete_insn_chain (NEXT_INSN (first), last, false);
2812 b->il.rtl->header = NULL;
2815 /* If the basic blocks are not adjacent, move them around. */
2816 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2818 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2820 emit_insn_after_noloc (first, BB_END (a), a);
2821 /* Skip possible DELETED_LABEL insn. */
2822 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2823 first = NEXT_INSN (first);
2824 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2825 BB_HEAD (b) = NULL;
2827 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2828 We need to call it explicitly. */
2829 update_bb_for_insn_chain (NEXT_INSN (first),
2830 BB_END (b), a);
2833 delete_insn (first);
2835 /* Otherwise just re-associate the instructions. */
2836 else
2838 rtx insn;
2840 update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);
2842 insn = BB_HEAD (b);
2843 /* Skip possible DELETED_LABEL insn. */
2844 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2845 insn = NEXT_INSN (insn);
2846 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2847 BB_HEAD (b) = NULL;
2848 BB_END (a) = BB_END (b);
2849 delete_insn (insn);
2852 df_bb_delete (b->index);
2854 /* Possible tablejumps and barriers should appear after the block. */
2855 if (b->il.rtl->footer)
2857 if (!a->il.rtl->footer)
2858 a->il.rtl->footer = b->il.rtl->footer;
2859 else
2861 rtx last = a->il.rtl->footer;
2863 while (NEXT_INSN (last))
2864 last = NEXT_INSN (last);
2865 NEXT_INSN (last) = b->il.rtl->footer;
2866 PREV_INSN (b->il.rtl->footer) = last;
2868 b->il.rtl->footer = NULL;
2871 /* If B was a forwarder block, propagate the locus on the edge. */
2872 if (forwarder_p && !EDGE_SUCC (b, 0)->goto_locus)
2873 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
2875 if (dump_file)
2876 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
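/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): the merge routine above must be paired with its
   predicate, normally via the can_merge_blocks_p / merge_blocks cfghooks.
   The example_* name is made up. */
#if 0
static void
example_try_merge_blocks (basic_block a, basic_block b)
{
  if (cfg_layout_can_merge_blocks_p (a, b))
    cfg_layout_merge_blocks (a, b);
}
#endif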
2879 /* Split edge E. */
2881 static basic_block
2882 cfg_layout_split_edge (edge e)
2884 basic_block new_bb =
2885 create_basic_block (e->src != ENTRY_BLOCK_PTR
2886 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2887 NULL_RTX, e->src);
2889 if (e->dest == EXIT_BLOCK_PTR)
2890 BB_COPY_PARTITION (new_bb, e->src);
2891 else
2892 BB_COPY_PARTITION (new_bb, e->dest);
2893 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2894 redirect_edge_and_branch_force (e, new_bb);
2896 return new_bb;
2899 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2901 static void
2902 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2906 /* Return 1 if BB ends with a call, possibly followed by some
2907 instructions that must stay with the call, 0 otherwise. */
2909 static bool
2910 rtl_block_ends_with_call_p (basic_block bb)
2912 rtx insn = BB_END (bb);
2914 while (!CALL_P (insn)
2915 && insn != BB_HEAD (bb)
2916 && (keep_with_call_p (insn)
2917 || NOTE_P (insn)
2918 || DEBUG_INSN_P (insn)))
2919 insn = PREV_INSN (insn);
2920 return (CALL_P (insn));
2923 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2925 static bool
2926 rtl_block_ends_with_condjump_p (const_basic_block bb)
2928 return any_condjump_p (BB_END (bb));
2931 /* Return true if we need to add a fake edge to the exit block.
2932 Helper function for rtl_flow_call_edges_add. */
2934 static bool
2935 need_fake_edge_p (const_rtx insn)
2937 if (!INSN_P (insn))
2938 return false;
2940 if ((CALL_P (insn)
2941 && !SIBLING_CALL_P (insn)
2942 && !find_reg_note (insn, REG_NORETURN, NULL)
2943 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2944 return true;
2946 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2947 && MEM_VOLATILE_P (PATTERN (insn)))
2948 || (GET_CODE (PATTERN (insn)) == PARALLEL
2949 && asm_noperands (insn) != -1
2950 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2951 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2954 /* Add fake edges to the function exit for any non-constant and non-noreturn
2955 calls and volatile inline assembly, either in the bitmap of blocks specified
2956 by BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of blocks
2957 that were split.
2959 The goal is to expose cases in which entering a basic block does not imply
2960 that all subsequent instructions must be executed. */
2962 static int
2963 rtl_flow_call_edges_add (sbitmap blocks)
2965 int i;
2966 int blocks_split = 0;
2967 int last_bb = last_basic_block;
2968 bool check_last_block = false;
2970 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2971 return 0;
2973 if (! blocks)
2974 check_last_block = true;
2975 else
2976 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2978 /* In the last basic block, before epilogue generation, there will be
2979 a fallthru edge to EXIT. Special care is required if the last insn
2980 of the last basic block is a call because make_edge folds duplicate
2981 edges, which would result in the fallthru edge also being marked
2982 fake, which would result in the fallthru edge being removed by
2983 remove_fake_edges, which would result in an invalid CFG.
2985 Moreover, we can't elide the outgoing fake edge, since the block
2986 profiler needs to take this into account in order to solve the minimal
2987 spanning tree in the case that the call doesn't return.
2989 Handle this by adding a dummy instruction in a new last basic block. */
2990 if (check_last_block)
2992 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2993 rtx insn = BB_END (bb);
2995 /* Back up past insns that must be kept in the same block as a call. */
2996 while (insn != BB_HEAD (bb)
2997 && keep_with_call_p (insn))
2998 insn = PREV_INSN (insn);
3000 if (need_fake_edge_p (insn))
3002 edge e;
3004 e = find_edge (bb, EXIT_BLOCK_PTR);
3005 if (e)
3007 insert_insn_on_edge (gen_use (const0_rtx), e);
3008 commit_edge_insertions ();
3013 /* Now add fake edges to the function exit for any non-constant
3014 calls, since there is no way that we can determine if they will
3015 return or not... */
3017 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
3019 basic_block bb = BASIC_BLOCK (i);
3020 rtx insn;
3021 rtx prev_insn;
3023 if (!bb)
3024 continue;
3026 if (blocks && !TEST_BIT (blocks, i))
3027 continue;
3029 for (insn = BB_END (bb); ; insn = prev_insn)
3031 prev_insn = PREV_INSN (insn);
3032 if (need_fake_edge_p (insn))
3034 edge e;
3035 rtx split_at_insn = insn;
3037 /* Don't split the block between a call and an insn that should
3038 remain in the same block as the call. */
3039 if (CALL_P (insn))
3040 while (split_at_insn != BB_END (bb)
3041 && keep_with_call_p (NEXT_INSN (split_at_insn)))
3042 split_at_insn = NEXT_INSN (split_at_insn);
3044 /* The handling above of the final block before the epilogue
3045 should be enough to verify that there is no edge to the exit
3046 block in the CFG already. Calling make_edge in such a case would
3047 cause us to mark that edge as fake and remove it later. */
3049 #ifdef ENABLE_CHECKING
3050 if (split_at_insn == BB_END (bb))
3052 e = find_edge (bb, EXIT_BLOCK_PTR);
3053 gcc_assert (e == NULL);
3055 #endif
3057 /* Note that the following may create a new basic block
3058 and renumber the existing basic blocks. */
3059 if (split_at_insn != BB_END (bb))
3061 e = split_block (bb, split_at_insn);
3062 if (e)
3063 blocks_split++;
3066 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
3069 if (insn == BB_HEAD (bb))
3070 break;
3074 if (blocks_split)
3075 verify_flow_info ();
3077 return blocks_split;
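/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): a profiling-style client adds fake exit edges for
   the whole CFG (BLOCKS == NULL), does its analysis, then removes the fake
   edges again. The example_* name is made up. */
#if 0
static void
example_with_fake_call_edges (void)
{
  int blocks_split = rtl_flow_call_edges_add (NULL);

  /* ... build a spanning tree / instrument here ...  */

  remove_fake_edges ();
  if (blocks_split)
    verify_flow_info ();
}
#endif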
3080 /* Add COMP_RTX as a condition at the end of COND_BB. FIRST_HEAD is
3081 the conditional branch target. SECOND_HEAD would be the fall-thru block,
3082 but there is no need to handle it here; the loop versioning code takes
3083 care of that. SECOND_HEAD exists only because the condition is also
3084 needed for trees, and this hook must have the same type. */
3085 static void
3086 rtl_lv_add_condition_to_bb (basic_block first_head ,
3087 basic_block second_head ATTRIBUTE_UNUSED,
3088 basic_block cond_bb, void *comp_rtx)
3090 rtx label, seq, jump;
3091 rtx op0 = XEXP ((rtx)comp_rtx, 0);
3092 rtx op1 = XEXP ((rtx)comp_rtx, 1);
3093 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3094 enum machine_mode mode;
3097 label = block_label (first_head);
3098 mode = GET_MODE (op0);
3099 if (mode == VOIDmode)
3100 mode = GET_MODE (op1);
3102 start_sequence ();
3103 op0 = force_operand (op0, NULL_RTX);
3104 op1 = force_operand (op1, NULL_RTX);
3105 do_compare_rtx_and_jump (op0, op1, comp, 0,
3106 mode, NULL_RTX, NULL_RTX, label, -1);
3107 jump = get_last_insn ();
3108 JUMP_LABEL (jump) = label;
3109 LABEL_NUSES (label)++;
3110 seq = get_insns ();
3111 end_sequence ();
3113 /* Add the new cond insn in the new head. */
3114 emit_insn_after (seq, BB_END (cond_bb));
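/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): the loop versioning code passes a comparison rtx
   such as (ne reg const0_rtx) as COMP_RTX; building one could look roughly
   like this. The example_* name is made up. */
#if 0
static void
example_add_versioning_condition (basic_block cond_bb, basic_block then_bb,
				  basic_block else_bb, rtx reg)
{
  rtx cond = gen_rtx_fmt_ee (NE, VOIDmode, reg, const0_rtx);

  rtl_lv_add_condition_to_bb (then_bb, else_bb, cond_bb, cond);
}
#endif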
3118 /* Given a block B with a conditional branch at its end, store the
3119 branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
3120 respectively. */
3121 static void
3122 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3123 edge *fallthru_edge)
3125 edge e = EDGE_SUCC (b, 0);
3127 if (e->flags & EDGE_FALLTHRU)
3129 *fallthru_edge = e;
3130 *branch_edge = EDGE_SUCC (b, 1);
3132 else
3134 *branch_edge = e;
3135 *fallthru_edge = EDGE_SUCC (b, 1);
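/* Illustrative sketch only (hypothetical, not part of this file and kept out
   of the build with #if 0): retrieving both outgoing edges of a block that
   ends in a conditional jump via the hook implemented above. The example_*
   name is made up. */
#if 0
static void
example_get_cond_edges (basic_block b)
{
  edge branch_e, fallthru_e;

  rtl_extract_cond_bb_edges (b, &branch_e, &fallthru_e);
  /* ... branch_e->dest is the branch target, fallthru_e->dest the
     fall-thru block ...  */
}
#endif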
3139 void
3140 init_rtl_bb_info (basic_block bb)
3142 gcc_assert (!bb->il.rtl);
3143 bb->il.rtl = ggc_alloc_cleared_rtl_bb_info ();
3146 /* Returns true if it is possible to remove edge E by redirecting
3147 it to the destination of the other edge from E->src. */
3149 static bool
3150 rtl_can_remove_branch_p (const_edge e)
3152 const_basic_block src = e->src;
3153 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3154 const_rtx insn = BB_END (src), set;
3156 /* The conditions are taken from try_redirect_by_replacing_jump. */
3157 if (target == EXIT_BLOCK_PTR)
3158 return false;
3160 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3161 return false;
3163 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3164 || BB_PARTITION (src) != BB_PARTITION (target))
3165 return false;
3167 if (!onlyjump_p (insn)
3168 || tablejump_p (insn, NULL, NULL))
3169 return false;
3171 set = single_set (insn);
3172 if (!set || side_effects_p (set))
3173 return false;
3175 return true;
3178 /* Implementation of CFG manipulation for linearized RTL. */
3179 struct cfg_hooks rtl_cfg_hooks = {
3180 "rtl",
3181 rtl_verify_flow_info,
3182 rtl_dump_bb,
3183 rtl_create_basic_block,
3184 rtl_redirect_edge_and_branch,
3185 rtl_redirect_edge_and_branch_force,
3186 rtl_can_remove_branch_p,
3187 rtl_delete_block,
3188 rtl_split_block,
3189 rtl_move_block_after,
3190 rtl_can_merge_blocks, /* can_merge_blocks_p */
3191 rtl_merge_blocks,
3192 rtl_predict_edge,
3193 rtl_predicted_by_p,
3194 NULL, /* can_duplicate_block_p */
3195 NULL, /* duplicate_block */
3196 rtl_split_edge,
3197 rtl_make_forwarder_block,
3198 rtl_tidy_fallthru_edge,
3199 rtl_force_nonfallthru,
3200 rtl_block_ends_with_call_p,
3201 rtl_block_ends_with_condjump_p,
3202 rtl_flow_call_edges_add,
3203 NULL, /* execute_on_growing_pred */
3204 NULL, /* execute_on_shrinking_pred */
3205 NULL, /* duplicate loop for trees */
3206 NULL, /* lv_add_condition_to_bb */
3207 NULL, /* lv_adjust_loop_header_phi*/
3208 NULL, /* extract_cond_bb_edges */
3209 NULL /* flush_pending_stmts */
3212 /* Implementation of CFG manipulation for cfg layout RTL, where
3213 basic blocks connected via fallthru edges do not have to be adjacent.
3214 This representation will hopefully become the default one in a future
3215 version of the compiler. */
3217 /* We do not want to declare these functions in a header file, since they
3218 should only be used through the cfghooks interface, and we do not want to
3219 move them here since it would require also moving quite a lot of related
3220 code. They are in cfglayout.c. */
3221 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3222 extern basic_block cfg_layout_duplicate_bb (basic_block);
3224 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3225 "cfglayout mode",
3226 rtl_verify_flow_info_1,
3227 rtl_dump_bb,
3228 cfg_layout_create_basic_block,
3229 cfg_layout_redirect_edge_and_branch,
3230 cfg_layout_redirect_edge_and_branch_force,
3231 rtl_can_remove_branch_p,
3232 cfg_layout_delete_block,
3233 cfg_layout_split_block,
3234 rtl_move_block_after,
3235 cfg_layout_can_merge_blocks_p,
3236 cfg_layout_merge_blocks,
3237 rtl_predict_edge,
3238 rtl_predicted_by_p,
3239 cfg_layout_can_duplicate_bb_p,
3240 cfg_layout_duplicate_bb,
3241 cfg_layout_split_edge,
3242 rtl_make_forwarder_block,
3243 NULL, /* tidy_fallthru_edge */
3244 rtl_force_nonfallthru,
3245 rtl_block_ends_with_call_p,
3246 rtl_block_ends_with_condjump_p,
3247 rtl_flow_call_edges_add,
3248 NULL, /* execute_on_growing_pred */
3249 NULL, /* execute_on_shrinking_pred */
3250 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3251 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3252 NULL, /* lv_adjust_loop_header_phi*/
3253 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3254 NULL /* flush_pending_stmts */