[official-gcc.git] / gcc / cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* This file contains low level functions to manipulate the CFG and analyze it
24 that are aware of the RTL intermediate language.
26 Available functionality:
27 - Basic CFG/RTL manipulation API documented in cfghooks.h
28 - CFG-aware instruction chain manipulation
29 delete_insn, delete_insn_chain
30 - Edge splitting and committing to edges
31 insert_insn_on_edge, commit_edge_insertions
32 - CFG updating after insn simplification
33 purge_dead_edges, purge_all_dead_edges
35 Functions not meant for generic use:
36 - Infrastructure to determine quickly basic block for insn
37 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
38 - Edge redirection with updating and optimizing of insn chain
39 block_label, tidy_fallthru_edge, force_nonfallthru */
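/* Editorial sketch, not part of the original source: a typical client of
   this API simplifies or deletes a control flow insn and then lets the CFG
   catch up, roughly

       delete_insn (BB_END (bb));
       purge_dead_edges (bb);

   (compare delete_insn_and_edges below), while code destined for an edge is
   queued with insert_insn_on_edge and only materialized by a later call to
   commit_edge_insertions.  */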
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "tm.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
49 #include "regs.h"
50 #include "flags.h"
51 #include "output.h"
52 #include "function.h"
53 #include "except.h"
54 #include "toplev.h"
55 #include "tm_p.h"
56 #include "obstack.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
59 #include "expr.h"
60 #include "target.h"
61 #include "cfgloop.h"
62 #include "ggc.h"
63 #include "tree-pass.h"
64 #include "df.h"
66 static int can_delete_note_p (rtx);
67 static int can_delete_label_p (rtx);
68 static void commit_one_edge_insertion (edge);
69 static basic_block rtl_split_edge (edge);
70 static bool rtl_move_block_after (basic_block, basic_block);
71 static int rtl_verify_flow_info (void);
72 static basic_block cfg_layout_split_block (basic_block, void *);
73 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
74 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
75 static void cfg_layout_delete_block (basic_block);
76 static void rtl_delete_block (basic_block);
77 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
78 static edge rtl_redirect_edge_and_branch (edge, basic_block);
79 static basic_block rtl_split_block (basic_block, void *);
80 static void rtl_dump_bb (basic_block, FILE *, int);
81 static int rtl_verify_flow_info_1 (void);
82 static void rtl_make_forwarder_block (edge);
84 /* Return true if NOTE is not one of the ones that must be kept paired,
85 so that we may simply delete it. */
87 static int
88 can_delete_note_p (rtx note)
90 return (NOTE_KIND (note) == NOTE_INSN_DELETED
91 || NOTE_KIND (note) == NOTE_INSN_BASIC_BLOCK);
94 /* True if a given label can be deleted. */
96 static int
97 can_delete_label_p (rtx label)
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
102 && !in_expr_list_p (forced_labels, label));
105 /* Delete INSN by patching it out. Return the next insn. */
107 rtx
108 delete_insn (rtx insn)
110 rtx next = NEXT_INSN (insn);
111 rtx note;
112 bool really_delete = true;
114 if (LABEL_P (insn))
116 /* Some labels can't be directly removed from the INSN chain, as they
117 might be referenced via variables, the constant pool, etc.
118 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
119 if (! can_delete_label_p (insn))
121 const char *name = LABEL_NAME (insn);
123 really_delete = false;
124 PUT_CODE (insn, NOTE);
125 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
126 NOTE_DELETED_LABEL_NAME (insn) = name;
129 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
132 if (really_delete)
134 /* If this insn has already been deleted, something is very wrong. */
135 gcc_assert (!INSN_DELETED_P (insn));
136 remove_insn (insn);
137 INSN_DELETED_P (insn) = 1;
140 /* If deleting a jump, decrement the use count of the label. Deleting
141 the label itself should happen in the normal course of block merging. */
142 if (JUMP_P (insn)
143 && JUMP_LABEL (insn)
144 && LABEL_P (JUMP_LABEL (insn)))
145 LABEL_NUSES (JUMP_LABEL (insn))--;
147 /* Also if deleting an insn that references a label. */
148 else
150 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
151 && LABEL_P (XEXP (note, 0)))
153 LABEL_NUSES (XEXP (note, 0))--;
154 remove_note (insn, note);
158 if (JUMP_P (insn)
159 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
160 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
162 rtx pat = PATTERN (insn);
163 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
164 int len = XVECLEN (pat, diff_vec_p);
165 int i;
167 for (i = 0; i < len; i++)
169 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
171 /* When deleting code in bulk (e.g. removing many unreachable
172 blocks) we can delete a label that's a target of the vector
173 before deleting the vector itself. */
174 if (!NOTE_P (label))
175 LABEL_NUSES (label)--;
179 return next;
182 /* Like delete_insn but also purge dead edges from BB. */
183 rtx
184 delete_insn_and_edges (rtx insn)
186 rtx x;
187 bool purge = false;
189 if (INSN_P (insn)
190 && BLOCK_FOR_INSN (insn)
191 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
192 purge = true;
193 x = delete_insn (insn);
194 if (purge)
195 purge_dead_edges (BLOCK_FOR_INSN (insn));
196 return x;
199 /* Unlink a chain of insns between START and FINISH, leaving notes
200 that must be paired. If CLEAR_BB is true, we set bb field for
201 insns that cannot be removed to NULL. */
203 void
204 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
206 rtx next;
208 /* Unchain the insns one by one. It would be quicker to delete all of these
209 with a single unchaining, rather than one at a time, but we need to keep
210 the NOTE's. */
211 while (1)
213 next = NEXT_INSN (start);
214 if (NOTE_P (start) && !can_delete_note_p (start))
216 else
217 next = delete_insn (start);
219 if (clear_bb && !INSN_DELETED_P (start))
220 set_block_for_insn (start, NULL);
222 if (start == finish)
223 break;
224 start = next;
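/* Editorial sketch, not part of the original source: rtl_delete_block below
   uses this helper to remove a whole block, clearing the bb field of any
   insn that has to stay behind:

       rtx head = BB_HEAD (b), end = get_last_bb_insn (b);
       delete_insn_chain (head, end, true);  */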
228 /* Like delete_insn_chain but also purge dead edges from BB. */
229 void
230 delete_insn_chain_and_edges (rtx first, rtx last)
232 bool purge = false;
234 if (INSN_P (last)
235 && BLOCK_FOR_INSN (last)
236 && BB_END (BLOCK_FOR_INSN (last)) == last)
237 purge = true;
238 delete_insn_chain (first, last, false);
239 if (purge)
240 purge_dead_edges (BLOCK_FOR_INSN (last));
243 /* Create a new basic block consisting of the instructions between HEAD and END
244 inclusive. This function is designed to allow fast BB construction - it reuses
245 the note and basic block struct in BB_NOTE, if any, does not grow the
246 BASIC_BLOCK chain, and should be used directly only by CFG construction code.
247 END can be NULL to create a new empty basic block before HEAD. Both END
248 and HEAD can be NULL to create a basic block at the end of the INSN chain.
249 AFTER is the basic block the new block should be placed after. */
251 basic_block
252 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
254 basic_block bb;
256 if (bb_note
257 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
258 && bb->aux == NULL)
260 /* If we found an existing note, thread it back onto the chain. */
262 rtx after;
264 if (LABEL_P (head))
265 after = head;
266 else
268 after = PREV_INSN (head);
269 head = bb_note;
272 if (after != bb_note && NEXT_INSN (after) != bb_note)
273 reorder_insns_nobb (bb_note, bb_note, after);
275 else
277 /* Otherwise we must create a note and a basic block structure. */
279 bb = alloc_block ();
281 init_rtl_bb_info (bb);
282 if (!head && !end)
283 head = end = bb_note
284 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
285 else if (LABEL_P (head) && end)
287 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
288 if (head == end)
289 end = bb_note;
291 else
293 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
294 head = bb_note;
295 if (!end)
296 end = head;
299 NOTE_BASIC_BLOCK (bb_note) = bb;
302 /* Always include the bb note in the block. */
303 if (NEXT_INSN (end) == bb_note)
304 end = bb_note;
306 BB_HEAD (bb) = head;
307 BB_END (bb) = end;
308 bb->index = last_basic_block++;
309 bb->flags = BB_NEW | BB_RTL;
310 link_block (bb, after);
311 SET_BASIC_BLOCK (bb->index, bb);
312 df_bb_refs_record (bb->index, false);
313 update_bb_for_insn (bb);
314 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
316 /* Tag the block so that we know it has been used when considering
317 other basic block notes. */
318 bb->aux = bb;
320 return bb;
323 /* Create a new basic block consisting of the instructions between HEAD and END
324 and place it in the BB chain after block AFTER. END can be NULL to
325 create a new empty basic block before HEAD. Both END and HEAD can be NULL to
326 create a basic block at the end of the INSN chain. */
328 static basic_block
329 rtl_create_basic_block (void *headp, void *endp, basic_block after)
331 rtx head = (rtx) headp, end = (rtx) endp;
332 basic_block bb;
334 /* Grow the basic block array if needed. */
335 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
337 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
338 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
341 n_basic_blocks++;
343 bb = create_basic_block_structure (head, end, NULL, after);
344 bb->aux = NULL;
345 return bb;
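/* Editorial sketch, not part of the original source: callers normally go
   through the create_basic_block hook rather than calling this directly;
   e.g. force_nonfallthru_and_redirect below does

       jump_block = create_basic_block (note, NULL, e->src);

   where the NULL END asks for a new empty block placed just before NOTE,
   as described in the comment above.  */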
348 static basic_block
349 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
351 basic_block newbb = rtl_create_basic_block (head, end, after);
353 return newbb;
356 /* Delete the insns in a (non-live) block. We physically delete every
357 non-deleted-note insn, and update the flow graph appropriately.
359 Return nonzero if we deleted an exception handler. */
361 /* ??? Preserving all such notes strikes me as wrong. It would be nice
362 to post-process the stream to remove empty blocks, loops, ranges, etc. */
364 static void
365 rtl_delete_block (basic_block b)
367 rtx insn, end;
369 /* If the head of this block is a CODE_LABEL, then it might be the
370 label for an exception handler which can't be reached. We need
371 to remove the label from the exception_handler_label list. */
372 insn = BB_HEAD (b);
373 if (LABEL_P (insn))
374 maybe_remove_eh_handler (insn);
376 end = get_last_bb_insn (b);
378 /* Selectively delete the entire chain. */
379 BB_HEAD (b) = NULL;
380 delete_insn_chain (insn, end, true);
383 if (dump_file)
384 fprintf (dump_file, "deleting block %d\n", b->index);
385 df_bb_delete (b->index);
388 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
390 void
391 compute_bb_for_insn (void)
393 basic_block bb;
395 FOR_EACH_BB (bb)
397 rtx end = BB_END (bb);
398 rtx insn;
400 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
402 BLOCK_FOR_INSN (insn) = bb;
403 if (insn == end)
404 break;
409 /* Release the basic_block_for_insn array. */
411 unsigned int
412 free_bb_for_insn (void)
414 rtx insn;
415 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
416 if (!BARRIER_P (insn))
417 BLOCK_FOR_INSN (insn) = NULL;
418 return 0;
421 struct tree_opt_pass pass_free_cfg =
423 NULL, /* name */
424 NULL, /* gate */
425 free_bb_for_insn, /* execute */
426 NULL, /* sub */
427 NULL, /* next */
428 0, /* static_pass_number */
429 0, /* tv_id */
430 0, /* properties_required */
431 0, /* properties_provided */
432 PROP_cfg, /* properties_destroyed */
433 0, /* todo_flags_start */
434 0, /* todo_flags_finish */
435 0 /* letter */
438 /* Return the RTX after which to emit code at the entry of the function. */
439 rtx
440 entry_of_function (void)
442 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
443 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
446 /* Emit INSN at the entry point of the function, ensuring that it is only
447 executed once per function. */
448 void
449 emit_insn_at_entry (rtx insn)
451 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
452 edge e = ei_safe_edge (ei);
453 gcc_assert (e->flags & EDGE_FALLTHRU);
455 insert_insn_on_edge (insn, e);
456 commit_edge_insertions ();
459 /* Update insns block within BB. */
461 void
462 update_bb_for_insn (basic_block bb)
464 rtx insn;
466 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
468 if (!BARRIER_P (insn))
470 set_block_for_insn (insn, bb);
471 df_insn_change_bb (insn);
473 if (insn == BB_END (bb))
474 break;
478 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
479 note associated with the BLOCK. */
481 static rtx
482 first_insn_after_basic_block_note (basic_block block)
484 rtx insn;
486 /* Get the first instruction in the block. */
487 insn = BB_HEAD (block);
489 if (insn == NULL_RTX)
490 return NULL_RTX;
491 if (LABEL_P (insn))
492 insn = NEXT_INSN (insn);
493 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
495 return NEXT_INSN (insn);
498 /* Creates a new basic block just after basic block B by splitting
499 everything after specified instruction I. */
501 static basic_block
502 rtl_split_block (basic_block bb, void *insnp)
504 basic_block new_bb;
505 rtx insn = (rtx) insnp;
506 edge e;
507 edge_iterator ei;
509 if (!insn)
511 insn = first_insn_after_basic_block_note (bb);
513 if (insn)
514 insn = PREV_INSN (insn);
515 else
516 insn = get_last_insn ();
519 /* We probably should check type of the insn so that we do not create
520 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
521 bother. */
522 if (insn == BB_END (bb))
523 emit_note_after (NOTE_INSN_DELETED, insn);
525 /* Create the new basic block. */
526 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
527 BB_COPY_PARTITION (new_bb, bb);
528 BB_END (bb) = insn;
530 /* Redirect the outgoing edges. */
531 new_bb->succs = bb->succs;
532 bb->succs = NULL;
533 FOR_EACH_EDGE (e, ei, new_bb->succs)
534 e->src = new_bb;
536 /* The new block starts off being dirty. */
537 df_set_bb_dirty (bb);
538 return new_bb;
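/* Editorial sketch, not part of the original source: this routine is
   normally reached through the split_block hook from cfghooks.h, roughly

       edge fallthru = split_block (bb, insn);
       basic_block rest = fallthru->dest;

   so BB keeps everything up to and including INSN and the new block REST
   (a name used only in this sketch) receives the tail.  */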
541 /* Blocks A and B are to be merged into a single block A. The insns
542 are already contiguous. */
544 static void
545 rtl_merge_blocks (basic_block a, basic_block b)
547 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
548 rtx del_first = NULL_RTX, del_last = NULL_RTX;
549 int b_empty = 0;
551 if (dump_file)
552 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
554 /* If there was a CODE_LABEL beginning B, delete it. */
555 if (LABEL_P (b_head))
557 /* This might have been an EH label that no longer has incoming
558 EH edges. Update data structures to match. */
559 maybe_remove_eh_handler (b_head);
561 /* Detect basic blocks with nothing but a label. This can happen
562 in particular at the end of a function. */
563 if (b_head == b_end)
564 b_empty = 1;
566 del_first = del_last = b_head;
567 b_head = NEXT_INSN (b_head);
570 /* Delete the basic block note and handle blocks containing just that
571 note. */
572 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
574 if (b_head == b_end)
575 b_empty = 1;
576 if (! del_last)
577 del_first = b_head;
579 del_last = b_head;
580 b_head = NEXT_INSN (b_head);
583 /* If there was a jump out of A, delete it. */
584 if (JUMP_P (a_end))
586 rtx prev;
588 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
589 if (!NOTE_P (prev)
590 || NOTE_INSN_BASIC_BLOCK_P (prev)
591 || prev == BB_HEAD (a))
592 break;
594 del_first = a_end;
596 #ifdef HAVE_cc0
597 /* If this was a conditional jump, we need to also delete
598 the insn that set cc0. */
599 if (only_sets_cc0_p (prev))
601 rtx tmp = prev;
603 prev = prev_nonnote_insn (prev);
604 if (!prev)
605 prev = BB_HEAD (a);
606 del_first = tmp;
608 #endif
610 a_end = PREV_INSN (del_first);
612 else if (BARRIER_P (NEXT_INSN (a_end)))
613 del_first = NEXT_INSN (a_end);
615 /* Delete everything marked above as well as crap that might be
616 hanging out between the two blocks. */
617 BB_HEAD (b) = NULL;
618 delete_insn_chain (del_first, del_last, true);
620 /* Reassociate the insns of B with A. */
621 if (!b_empty)
623 rtx x;
625 for (x = a_end; x != b_end; x = NEXT_INSN (x))
627 set_block_for_insn (x, a);
628 df_insn_change_bb (x);
631 set_block_for_insn (b_end, a);
632 df_insn_change_bb (b_end);
634 a_end = b_end;
637 df_bb_delete (b->index);
638 BB_END (a) = a_end;
642 /* Return true when blocks A and B can be merged. */
643 static bool
644 rtl_can_merge_blocks (basic_block a, basic_block b)
646 /* If we are partitioning hot/cold basic blocks, we don't want to
647 mess up unconditional or indirect jumps that cross between hot
648 and cold sections.
650 Basic block partitioning may result in some jumps that appear to
651 be optimizable (or blocks that appear to be mergeable), but which really
652 must be left untouched (they are required to make it safely across
653 partition boundaries). See the comments at the top of
654 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
656 if (BB_PARTITION (a) != BB_PARTITION (b))
657 return false;
659 /* There must be exactly one edge in between the blocks. */
660 return (single_succ_p (a)
661 && single_succ (a) == b
662 && single_pred_p (b)
663 && a != b
664 /* Must be simple edge. */
665 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
666 && a->next_bb == b
667 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
668 /* If the jump insn has side effects,
669 we can't kill the edge. */
670 && (!JUMP_P (BB_END (a))
671 || (reload_completed
672 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
675 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
676 exist. */
678 rtx
679 block_label (basic_block block)
681 if (block == EXIT_BLOCK_PTR)
682 return NULL_RTX;
684 if (!LABEL_P (BB_HEAD (block)))
686 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
689 return BB_HEAD (block);
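/* Editorial sketch, not part of the original source: the usual pattern when
   retargeting a jump is

       if (!redirect_jump (insn, block_label (target), 0))
         gcc_assert (target == EXIT_BLOCK_PTR);

   as done in try_redirect_by_replacing_jump and redirect_branch_edge
   below; block_label guarantees the label exists before redirection.  */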
692 /* Attempt to perform edge redirection by replacing possibly complex jump
693 instruction by unconditional jump or removing jump completely. This can
694 apply only if all edges now point to the same block. The parameters and
695 return values are equivalent to redirect_edge_and_branch. */
697 edge
698 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
700 basic_block src = e->src;
701 rtx insn = BB_END (src), kill_from;
702 rtx set;
703 int fallthru = 0;
705 /* If we are partitioning hot/cold basic blocks, we don't want to
706 mess up unconditional or indirect jumps that cross between hot
707 and cold sections.
709 Basic block partitioning may result in some jumps that appear to
710 be optimizable (or blocks that appear to be mergeable), but which really
711 must be left untouched (they are required to make it safely across
712 partition boundaries). See the comments at the top of
713 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
715 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
716 || BB_PARTITION (src) != BB_PARTITION (target))
717 return NULL;
719 /* We can replace or remove a complex jump only when we have exactly
720 two edges. Also, if we have exactly one outgoing edge, we can
721 redirect that. */
722 if (EDGE_COUNT (src->succs) >= 3
723 /* Verify that all targets will be TARGET. Specifically, the
724 edge that is not E must also go to TARGET. */
725 || (EDGE_COUNT (src->succs) == 2
726 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
727 return NULL;
729 if (!onlyjump_p (insn))
730 return NULL;
731 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
732 return NULL;
734 /* Avoid removing branch with side effects. */
735 set = single_set (insn);
736 if (!set || side_effects_p (set))
737 return NULL;
739 /* In case we zap a conditional jump, we'll need to kill
740 the cc0 setter too. */
741 kill_from = insn;
742 #ifdef HAVE_cc0
743 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
744 && only_sets_cc0_p (PREV_INSN (insn)))
745 kill_from = PREV_INSN (insn);
746 #endif
748 /* See if we can create the fallthru edge. */
749 if (in_cfglayout || can_fallthru (src, target))
751 if (dump_file)
752 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
753 fallthru = 1;
755 /* Selectively unlink whole insn chain. */
756 if (in_cfglayout)
758 rtx insn = src->il.rtl->footer;
760 delete_insn_chain (kill_from, BB_END (src), false);
762 /* Remove barriers but keep jumptables. */
763 while (insn)
765 if (BARRIER_P (insn))
767 if (PREV_INSN (insn))
768 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
769 else
770 src->il.rtl->footer = NEXT_INSN (insn);
771 if (NEXT_INSN (insn))
772 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
774 if (LABEL_P (insn))
775 break;
776 insn = NEXT_INSN (insn);
779 else
780 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
781 false);
784 /* If this already is simplejump, redirect it. */
785 else if (simplejump_p (insn))
787 if (e->dest == target)
788 return NULL;
789 if (dump_file)
790 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
791 INSN_UID (insn), e->dest->index, target->index);
792 if (!redirect_jump (insn, block_label (target), 0))
794 gcc_assert (target == EXIT_BLOCK_PTR);
795 return NULL;
799 /* Cannot do anything for target exit block. */
800 else if (target == EXIT_BLOCK_PTR)
801 return NULL;
803 /* Or replace possibly complicated jump insn by simple jump insn. */
804 else
806 rtx target_label = block_label (target);
807 rtx barrier, label, table;
809 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
810 JUMP_LABEL (BB_END (src)) = target_label;
811 LABEL_NUSES (target_label)++;
812 if (dump_file)
813 fprintf (dump_file, "Replacing insn %i by jump %i\n",
814 INSN_UID (insn), INSN_UID (BB_END (src)));
817 delete_insn_chain (kill_from, insn, false);
819 /* Recognize a tablejump that we are converting to a
820 simple jump and remove its associated CODE_LABEL
821 and ADDR_VEC or ADDR_DIFF_VEC. */
822 if (tablejump_p (insn, &label, &table))
823 delete_insn_chain (label, table, false);
825 barrier = next_nonnote_insn (BB_END (src));
826 if (!barrier || !BARRIER_P (barrier))
827 emit_barrier_after (BB_END (src));
828 else
830 if (barrier != NEXT_INSN (BB_END (src)))
832 /* Move the jump before barrier so that the notes
833 which originally were or were created before jump table are
834 inside the basic block. */
835 rtx new_insn = BB_END (src);
836 rtx tmp;
838 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
839 tmp = NEXT_INSN (tmp))
841 set_block_for_insn (tmp, src);
842 df_insn_change_bb (tmp);
845 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
846 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
848 NEXT_INSN (new_insn) = barrier;
849 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
851 PREV_INSN (new_insn) = PREV_INSN (barrier);
852 PREV_INSN (barrier) = new_insn;
857 /* Keep only one edge out and set proper flags. */
858 if (!single_succ_p (src))
859 remove_edge (e);
860 gcc_assert (single_succ_p (src));
862 e = single_succ_edge (src);
863 if (fallthru)
864 e->flags = EDGE_FALLTHRU;
865 else
866 e->flags = 0;
868 e->probability = REG_BR_PROB_BASE;
869 e->count = src->count;
871 if (e->dest != target)
872 redirect_edge_succ (e, target);
873 return e;
876 /* Redirect edge representing branch of (un)conditional jump or tablejump,
877 NULL on failure */
878 static edge
879 redirect_branch_edge (edge e, basic_block target)
881 rtx tmp;
882 rtx old_label = BB_HEAD (e->dest);
883 basic_block src = e->src;
884 rtx insn = BB_END (src);
886 /* We can only redirect non-fallthru edges of a jump insn. */
887 if (e->flags & EDGE_FALLTHRU)
888 return NULL;
889 else if (!JUMP_P (insn))
890 return NULL;
892 /* Recognize a tablejump and adjust all matching cases. */
893 if (tablejump_p (insn, NULL, &tmp))
895 rtvec vec;
896 int j;
897 rtx new_label = block_label (target);
899 if (target == EXIT_BLOCK_PTR)
900 return NULL;
901 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
902 vec = XVEC (PATTERN (tmp), 0);
903 else
904 vec = XVEC (PATTERN (tmp), 1);
906 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
907 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
909 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
910 --LABEL_NUSES (old_label);
911 ++LABEL_NUSES (new_label);
914 /* Handle casesi dispatch insns. */
915 if ((tmp = single_set (insn)) != NULL
916 && SET_DEST (tmp) == pc_rtx
917 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
918 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
919 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
921 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
922 new_label);
923 --LABEL_NUSES (old_label);
924 ++LABEL_NUSES (new_label);
927 else
929 /* ?? We may play the games with moving the named labels from
930 one basic block to the other in case only one computed_jump is
931 available. */
932 if (computed_jump_p (insn)
933 /* A return instruction can't be redirected. */
934 || returnjump_p (insn))
935 return NULL;
937 /* If the insn doesn't go where we think, we're confused. */
938 gcc_assert (JUMP_LABEL (insn) == old_label);
940 /* If the substitution doesn't succeed, die. This can happen
941 if the back end emitted unrecognizable instructions or if
942 target is exit block on some arches. */
943 if (!redirect_jump (insn, block_label (target), 0))
945 gcc_assert (target == EXIT_BLOCK_PTR);
946 return NULL;
950 if (dump_file)
951 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
952 e->src->index, e->dest->index, target->index);
954 if (e->dest != target)
955 e = redirect_edge_succ_nodup (e, target);
957 return e;
960 /* Attempt to change code to redirect edge E to TARGET. Don't do that at the
961 expense of adding new instructions or reordering basic blocks.
963 The function can also be called with the edge destination equal to TARGET.
964 Then it should try the simplifications and do nothing if none is possible.
966 Return the edge representing the branch if the transformation succeeded. Return NULL
967 on failure.
968 We still return NULL in case E already pointed to TARGET and we did not
969 manage to simplify the instruction stream. */
971 static edge
972 rtl_redirect_edge_and_branch (edge e, basic_block target)
974 edge ret;
975 basic_block src = e->src;
977 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
978 return NULL;
980 if (e->dest == target)
981 return e;
983 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
985 df_set_bb_dirty (src);
986 return ret;
989 ret = redirect_branch_edge (e, target);
990 if (!ret)
991 return NULL;
993 df_set_bb_dirty (src);
994 return ret;
997 /* Like force_nonfallthru below, but additionally performs redirection.
998 Used by redirect_edge_and_branch_force. */
1000 static basic_block
1001 force_nonfallthru_and_redirect (edge e, basic_block target)
1003 basic_block jump_block, new_bb = NULL, src = e->src;
1004 rtx note;
1005 edge new_edge;
1006 int abnormal_edge_flags = 0;
1008 /* In the case the last instruction is a conditional jump to the next
1009 instruction, first redirect the jump itself and then continue
1010 by creating a basic block afterwards to redirect the fallthru edge. */
1011 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1012 && any_condjump_p (BB_END (e->src))
1013 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1015 rtx note;
1016 edge b = unchecked_make_edge (e->src, target, 0);
1017 bool redirected;
1019 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1020 gcc_assert (redirected);
1022 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1023 if (note)
1025 int prob = INTVAL (XEXP (note, 0));
1027 b->probability = prob;
1028 b->count = e->count * prob / REG_BR_PROB_BASE;
1029 e->probability -= e->probability;
1030 e->count -= b->count;
1031 if (e->probability < 0)
1032 e->probability = 0;
1033 if (e->count < 0)
1034 e->count = 0;
1038 if (e->flags & EDGE_ABNORMAL)
1040 /* Irritating special case - fallthru edge to the same block as abnormal
1041 edge.
1042 We can't redirect an abnormal edge, but we can still split the fallthru
1043 one and create a separate abnormal edge to the original destination.
1044 This allows bb-reorder to make such an edge non-fallthru. */
1045 gcc_assert (e->dest == target);
1046 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1047 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1049 else
1051 gcc_assert (e->flags & EDGE_FALLTHRU);
1052 if (e->src == ENTRY_BLOCK_PTR)
1054 /* We can't redirect the entry block. Create an empty block
1055 at the start of the function which we use to add the new
1056 jump. */
1057 edge tmp;
1058 edge_iterator ei;
1059 bool found = false;
1061 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1063 /* Change the existing edge's source to be the new block, and add
1064 a new edge from the entry block to the new block. */
1065 e->src = bb;
1066 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1068 if (tmp == e)
1070 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1071 found = true;
1072 break;
1074 else
1075 ei_next (&ei);
1078 gcc_assert (found);
1080 VEC_safe_push (edge, gc, bb->succs, e);
1081 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1085 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1087 /* Create the new structures. */
1089 /* If the old block ended with a tablejump, skip its table
1090 by searching forward from there. Otherwise start searching
1091 forward from the last instruction of the old block. */
1092 if (!tablejump_p (BB_END (e->src), NULL, &note))
1093 note = BB_END (e->src);
1094 note = NEXT_INSN (note);
1096 jump_block = create_basic_block (note, NULL, e->src);
1097 jump_block->count = e->count;
1098 jump_block->frequency = EDGE_FREQUENCY (e);
1099 jump_block->loop_depth = target->loop_depth;
1101 /* Make sure new block ends up in correct hot/cold section. */
1103 BB_COPY_PARTITION (jump_block, e->src);
1104 if (flag_reorder_blocks_and_partition
1105 && targetm.have_named_sections
1106 && JUMP_P (BB_END (jump_block))
1107 && !any_condjump_p (BB_END (jump_block))
1108 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1109 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1110 NULL_RTX,
1111 REG_NOTES
1112 (BB_END
1113 (jump_block)));
1115 /* Wire edge in. */
1116 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1117 new_edge->probability = e->probability;
1118 new_edge->count = e->count;
1120 /* Redirect old edge. */
1121 redirect_edge_pred (e, jump_block);
1122 e->probability = REG_BR_PROB_BASE;
1124 new_bb = jump_block;
1126 else
1127 jump_block = e->src;
1129 e->flags &= ~EDGE_FALLTHRU;
1130 if (target == EXIT_BLOCK_PTR)
1132 #ifdef HAVE_return
1133 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1134 #else
1135 gcc_unreachable ();
1136 #endif
1138 else
1140 rtx label = block_label (target);
1141 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1142 JUMP_LABEL (BB_END (jump_block)) = label;
1143 LABEL_NUSES (label)++;
1146 emit_barrier_after (BB_END (jump_block));
1147 redirect_edge_succ_nodup (e, target);
1149 if (abnormal_edge_flags)
1150 make_edge (src, target, abnormal_edge_flags);
1152 df_mark_solutions_dirty ();
1153 return new_bb;
1156 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
1157 (and possibly create a new basic block) to make the edge non-fallthru.
1158 Return the newly created BB or NULL if none. */
1160 basic_block
1161 force_nonfallthru (edge e)
1163 return force_nonfallthru_and_redirect (e, e->dest);
1166 /* Redirect edge even at the expense of creating new jump insn or
1167 basic block. Return new basic block if created, NULL otherwise.
1168 Conversion must be possible. */
1170 static basic_block
1171 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1173 if (redirect_edge_and_branch (e, target)
1174 || e->dest == target)
1175 return NULL;
1177 /* In case the edge redirection failed, try to force it to be non-fallthru
1178 and redirect newly created simplejump. */
1179 df_set_bb_dirty (e->src);
1180 return force_nonfallthru_and_redirect (e, target);
1183 /* The given edge should potentially be a fallthru edge. If that is in
1184 fact true, delete the jump and barriers that are in the way. */
1186 static void
1187 rtl_tidy_fallthru_edge (edge e)
1189 rtx q;
1190 basic_block b = e->src, c = b->next_bb;
1192 /* ??? In a late-running flow pass, other folks may have deleted basic
1193 blocks by nopping out blocks, leaving multiple BARRIERs between here
1194 and the target label. They ought to be chastised and fixed.
1196 We can also wind up with a sequence of undeletable labels between
1197 one block and the next.
1199 So search through a sequence of barriers, labels, and notes for
1200 the head of block C and assert that we really do fall through. */
1202 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1203 if (INSN_P (q))
1204 return;
1206 /* Remove what will soon cease being the jump insn from the source block.
1207 If block B consisted only of this single jump, turn it into a deleted
1208 note. */
1209 q = BB_END (b);
1210 if (JUMP_P (q)
1211 && onlyjump_p (q)
1212 && (any_uncondjump_p (q)
1213 || single_succ_p (b)))
1215 #ifdef HAVE_cc0
1216 /* If this was a conditional jump, we need to also delete
1217 the insn that set cc0. */
1218 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1219 q = PREV_INSN (q);
1220 #endif
1222 q = PREV_INSN (q);
1225 /* Selectively unlink the sequence. */
1226 if (q != PREV_INSN (BB_HEAD (c)))
1227 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1229 e->flags |= EDGE_FALLTHRU;
1232 /* Should move basic block BB after basic block AFTER. NIY. */
1234 static bool
1235 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1236 basic_block after ATTRIBUTE_UNUSED)
1238 return false;
1241 /* Split a (typically critical) edge. Return the new block.
1242 The edge must not be abnormal.
1244 ??? The code generally expects to be called on critical edges.
1245 The case of a block ending in an unconditional jump to a
1246 block with multiple predecessors is not handled optimally. */
1248 static basic_block
1249 rtl_split_edge (edge edge_in)
1251 basic_block bb;
1252 rtx before;
1254 /* Abnormal edges cannot be split. */
1255 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1257 /* We are going to place the new block in front of edge destination.
1258 Avoid existence of fallthru predecessors. */
1259 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1261 edge e;
1262 edge_iterator ei;
1264 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
1265 if (e->flags & EDGE_FALLTHRU)
1266 break;
1268 if (e)
1269 force_nonfallthru (e);
1272 /* Create the basic block note. */
1273 if (edge_in->dest != EXIT_BLOCK_PTR)
1274 before = BB_HEAD (edge_in->dest);
1275 else
1276 before = NULL_RTX;
1278 /* If this is a fall through edge to the exit block, the blocks might
1279 not be adjacent, and the right place is after the source. */
1280 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1282 before = NEXT_INSN (BB_END (edge_in->src));
1283 bb = create_basic_block (before, NULL, edge_in->src);
1284 BB_COPY_PARTITION (bb, edge_in->src);
1286 else
1288 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1289 /* ??? Why not edge_in->dest->prev_bb here? */
1290 BB_COPY_PARTITION (bb, edge_in->dest);
1293 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1295 /* For non-fallthru edges, we must adjust the predecessor's
1296 jump instruction to target our new block. */
1297 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1299 edge redirected = redirect_edge_and_branch (edge_in, bb);
1300 gcc_assert (redirected);
1302 else
1303 redirect_edge_succ (edge_in, bb);
1305 return bb;
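/* Editorial sketch, not part of the original source: passes usually split a
   critical edge through the generic hook, e.g.

       basic_block new_bb = split_edge (e);

   which dispatches here for RTL; commit_one_edge_insertion below does
   exactly this when queued insns fit at neither end of the edge.  */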
1308 /* Queue instructions for insertion on an edge between two basic blocks.
1309 The new instructions and basic blocks (if any) will not appear in the
1310 CFG until commit_edge_insertions is called. */
1312 void
1313 insert_insn_on_edge (rtx pattern, edge e)
1315 /* We cannot insert instructions on an abnormal critical edge.
1316 It will be easier to find the culprit if we die now. */
1317 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1319 if (e->insns.r == NULL_RTX)
1320 start_sequence ();
1321 else
1322 push_to_sequence (e->insns.r);
1324 emit_insn (pattern);
1326 e->insns.r = get_insns ();
1327 end_sequence ();
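/* Editorial sketch, not part of the original source: emit_insn_at_entry
   above shows the intended pairing -- queue a pattern on an edge, then
   flush all queued edges at once:

       insert_insn_on_edge (pattern, e);
       ...
       commit_edge_insertions ();  */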
1330 /* Update the CFG for the instructions queued on edge E. */
1332 static void
1333 commit_one_edge_insertion (edge e)
1335 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1336 basic_block bb = NULL;
1338 /* Pull the insns off the edge now since the edge might go away. */
1339 insns = e->insns.r;
1340 e->insns.r = NULL_RTX;
1342 if (!before && !after)
1344 /* Figure out where to put these things. If the destination has
1345 one predecessor, insert there. Except for the exit block. */
1346 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1348 bb = e->dest;
1350 /* Get the location correct wrt a code label, and "nice" wrt
1351 a basic block note, and before everything else. */
1352 tmp = BB_HEAD (bb);
1353 if (LABEL_P (tmp))
1354 tmp = NEXT_INSN (tmp);
1355 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1356 tmp = NEXT_INSN (tmp);
1357 if (tmp == BB_HEAD (bb))
1358 before = tmp;
1359 else if (tmp)
1360 after = PREV_INSN (tmp);
1361 else
1362 after = get_last_insn ();
1365 /* If the source has one successor and the edge is not abnormal,
1366 insert there. Except for the entry block. */
1367 else if ((e->flags & EDGE_ABNORMAL) == 0
1368 && single_succ_p (e->src)
1369 && e->src != ENTRY_BLOCK_PTR)
1371 bb = e->src;
1373 /* It is possible to have a non-simple jump here. Consider a target
1374 where some forms of unconditional jumps clobber a register. This
1375 happens on the fr30 for example.
1377 We know this block has a single successor, so we can just emit
1378 the queued insns before the jump. */
1379 if (JUMP_P (BB_END (bb)))
1380 before = BB_END (bb);
1381 else
1383 /* We'd better be fallthru, or we've lost track of
1384 what's what. */
1385 gcc_assert (e->flags & EDGE_FALLTHRU);
1387 after = BB_END (bb);
1390 /* Otherwise we must split the edge. */
1391 else
1393 bb = split_edge (e);
1394 after = BB_END (bb);
1396 if (flag_reorder_blocks_and_partition
1397 && targetm.have_named_sections
1398 && e->src != ENTRY_BLOCK_PTR
1399 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1400 && !(e->flags & EDGE_CROSSING))
1402 rtx bb_note, cur_insn;
1404 bb_note = NULL_RTX;
1405 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1406 cur_insn = NEXT_INSN (cur_insn))
1407 if (NOTE_INSN_BASIC_BLOCK_P (cur_insn))
1409 bb_note = cur_insn;
1410 break;
1413 if (JUMP_P (BB_END (bb))
1414 && !any_condjump_p (BB_END (bb))
1415 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1416 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
1417 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
1422 /* Now that we've found the spot, do the insertion. */
1424 if (before)
1426 emit_insn_before_noloc (insns, before, bb);
1427 last = prev_nonnote_insn (before);
1429 else
1430 last = emit_insn_after_noloc (insns, after, bb);
1432 if (returnjump_p (last))
1434 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1435 This is not currently a problem because this only happens
1436 for the (single) epilogue, which already has a fallthru edge
1437 to EXIT. */
1439 e = single_succ_edge (bb);
1440 gcc_assert (e->dest == EXIT_BLOCK_PTR
1441 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1443 e->flags &= ~EDGE_FALLTHRU;
1444 emit_barrier_after (last);
1446 if (before)
1447 delete_insn (before);
1449 else
1450 gcc_assert (!JUMP_P (last));
1452 /* Mark the basic block for find_many_sub_basic_blocks. */
1453 if (current_ir_type () != IR_RTL_CFGLAYOUT)
1454 bb->aux = &bb->aux;
1457 /* Update the CFG for all queued instructions. */
1459 void
1460 commit_edge_insertions (void)
1462 basic_block bb;
1463 sbitmap blocks;
1464 bool changed = false;
1466 #ifdef ENABLE_CHECKING
1467 verify_flow_info ();
1468 #endif
1470 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1472 edge e;
1473 edge_iterator ei;
1475 FOR_EACH_EDGE (e, ei, bb->succs)
1476 if (e->insns.r)
1478 changed = true;
1479 commit_one_edge_insertion (e);
1483 if (!changed)
1484 return;
1486 /* In the old rtl CFG API, it was OK to insert control flow on an
1487 edge, apparently? In cfglayout mode, this will *not* work, and
1488 the caller is responsible for making sure that control flow is
1489 valid at all times. */
1490 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1491 return;
1493 blocks = sbitmap_alloc (last_basic_block);
1494 sbitmap_zero (blocks);
1495 FOR_EACH_BB (bb)
1496 if (bb->aux)
1498 SET_BIT (blocks, bb->index);
1499 /* Check for forgotten bb->aux values before commit_edge_insertions
1500 call. */
1501 gcc_assert (bb->aux == &bb->aux);
1502 bb->aux = NULL;
1504 find_many_sub_basic_blocks (blocks);
1505 sbitmap_free (blocks);
1509 /* Print out RTL-specific basic block information (live information
1510 at start and end). */
1512 static void
1513 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
1515 rtx insn;
1516 rtx last;
1517 char *s_indent;
1519 s_indent = (char *) alloca ((size_t) indent + 1);
1520 memset (s_indent, ' ', (size_t) indent);
1521 s_indent[indent] = '\0';
1523 if (df)
1525 df_dump_top (bb, outf);
1526 putc ('\n', outf);
1529 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1530 insn = NEXT_INSN (insn))
1531 print_rtl_single (outf, insn);
1533 if (df)
1535 df_dump_bottom (bb, outf);
1536 putc ('\n', outf);
1541 /* Like print_rtl, but also print out live information for the start of each
1542 basic block. */
1544 void
1545 print_rtl_with_bb (FILE *outf, rtx rtx_first)
1547 rtx tmp_rtx;
1548 if (rtx_first == 0)
1549 fprintf (outf, "(nil)\n");
1550 else
1552 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1553 int max_uid = get_max_uid ();
1554 basic_block *start = XCNEWVEC (basic_block, max_uid);
1555 basic_block *end = XCNEWVEC (basic_block, max_uid);
1556 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1558 basic_block bb;
1560 if (df)
1561 df_dump_start (outf);
1563 FOR_EACH_BB_REVERSE (bb)
1565 rtx x;
1567 start[INSN_UID (BB_HEAD (bb))] = bb;
1568 end[INSN_UID (BB_END (bb))] = bb;
1569 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1571 enum bb_state state = IN_MULTIPLE_BB;
1573 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1574 state = IN_ONE_BB;
1575 in_bb_p[INSN_UID (x)] = state;
1577 if (x == BB_END (bb))
1578 break;
1582 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1584 int did_output;
1585 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1587 edge e;
1588 edge_iterator ei;
1590 fprintf (outf, ";; Start of basic block (");
1591 FOR_EACH_EDGE (e, ei, bb->preds)
1592 fprintf (outf, " %d", e->src->index);
1593 fprintf (outf, ") -> %d\n", bb->index);
1595 if (df)
1597 df_dump_top (bb, outf);
1598 putc ('\n', outf);
1600 FOR_EACH_EDGE (e, ei, bb->preds)
1602 fputs (";; Pred edge ", outf);
1603 dump_edge_info (outf, e, 0);
1604 fputc ('\n', outf);
1608 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1609 && !NOTE_P (tmp_rtx)
1610 && !BARRIER_P (tmp_rtx))
1611 fprintf (outf, ";; Insn is not within a basic block\n");
1612 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1613 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1615 did_output = print_rtl_single (outf, tmp_rtx);
1617 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1619 edge e;
1620 edge_iterator ei;
1622 fprintf (outf, ";; End of basic block %d -> (", bb->index);
1623 FOR_EACH_EDGE (e, ei, bb->succs)
1624 fprintf (outf, " %d", e->dest->index);
1625 fprintf (outf, ")\n");
1627 if (df)
1629 df_dump_bottom (bb, outf);
1630 putc ('\n', outf);
1632 putc ('\n', outf);
1633 FOR_EACH_EDGE (e, ei, bb->succs)
1635 fputs (";; Succ edge ", outf);
1636 dump_edge_info (outf, e, 1);
1637 fputc ('\n', outf);
1640 if (did_output)
1641 putc ('\n', outf);
1644 free (start);
1645 free (end);
1646 free (in_bb_p);
1649 if (current_function_epilogue_delay_list != 0)
1651 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1652 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1653 tmp_rtx = XEXP (tmp_rtx, 1))
1654 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1658 void
1659 update_br_prob_note (basic_block bb)
1661 rtx note;
1662 if (!JUMP_P (BB_END (bb)))
1663 return;
1664 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1665 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1666 return;
1667 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1670 /* Get the last insn associated with block BB (that includes barriers and
1671 tablejumps after BB). */
1672 rtx
1673 get_last_bb_insn (basic_block bb)
1675 rtx tmp;
1676 rtx end = BB_END (bb);
1678 /* Include any jump table following the basic block. */
1679 if (tablejump_p (end, NULL, &tmp))
1680 end = tmp;
1682 /* Include any barriers that may follow the basic block. */
1683 tmp = next_nonnote_insn (end);
1684 while (tmp && BARRIER_P (tmp))
1686 end = tmp;
1687 tmp = next_nonnote_insn (end);
1690 return end;
1693 /* Verify the CFG and RTL consistency common for both underlying RTL and
1694 cfglayout RTL.
1696 Currently it does the following checks:
1698 - overlapping of basic blocks
1699 - insns with wrong BLOCK_FOR_INSN pointers
1700 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1701 - tails of basic blocks (ensure that boundary is necessary)
1702 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1703 and NOTE_INSN_BASIC_BLOCK
1704 - verify that no fall_thru edge crosses hot/cold partition boundaries
1705 - verify that there are no pending RTL branch predictions
1707 In the future it can be extended to check a lot of other stuff as well
1708 (reachability of basic blocks, life information, etc.). */
1710 static int
1711 rtl_verify_flow_info_1 (void)
1713 rtx x;
1714 int err = 0;
1715 basic_block bb;
1717 /* Check the general integrity of the basic blocks. */
1718 FOR_EACH_BB_REVERSE (bb)
1720 rtx insn;
1722 if (!(bb->flags & BB_RTL))
1724 error ("BB_RTL flag not set for block %d", bb->index);
1725 err = 1;
1728 FOR_BB_INSNS (bb, insn)
1729 if (BLOCK_FOR_INSN (insn) != bb)
1731 error ("insn %d basic block pointer is %d, should be %d",
1732 INSN_UID (insn),
1733 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1734 bb->index);
1735 err = 1;
1738 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1739 if (!BARRIER_P (insn)
1740 && BLOCK_FOR_INSN (insn) != NULL)
1742 error ("insn %d in header of bb %d has non-NULL basic block",
1743 INSN_UID (insn), bb->index);
1744 err = 1;
1746 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1747 if (!BARRIER_P (insn)
1748 && BLOCK_FOR_INSN (insn) != NULL)
1750 error ("insn %d in footer of bb %d has non-NULL basic block",
1751 INSN_UID (insn), bb->index);
1752 err = 1;
1756 /* Now check the basic blocks (boundaries etc.) */
1757 FOR_EACH_BB_REVERSE (bb)
1759 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1760 edge e, fallthru = NULL;
1761 rtx note;
1762 edge_iterator ei;
1764 if (JUMP_P (BB_END (bb))
1765 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1766 && EDGE_COUNT (bb->succs) >= 2
1767 && any_condjump_p (BB_END (bb)))
1769 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1770 && profile_status != PROFILE_ABSENT)
1772 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1773 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1774 err = 1;
1777 FOR_EACH_EDGE (e, ei, bb->succs)
1779 if (e->flags & EDGE_FALLTHRU)
1781 n_fallthru++, fallthru = e;
1782 if ((e->flags & EDGE_CROSSING)
1783 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1784 && e->src != ENTRY_BLOCK_PTR
1785 && e->dest != EXIT_BLOCK_PTR))
1787 error ("fallthru edge crosses section boundary (bb %i)",
1788 e->src->index);
1789 err = 1;
1793 if ((e->flags & ~(EDGE_DFS_BACK
1794 | EDGE_CAN_FALLTHRU
1795 | EDGE_IRREDUCIBLE_LOOP
1796 | EDGE_LOOP_EXIT
1797 | EDGE_CROSSING)) == 0)
1798 n_branch++;
1800 if (e->flags & EDGE_ABNORMAL_CALL)
1801 n_call++;
1803 if (e->flags & EDGE_EH)
1804 n_eh++;
1805 else if (e->flags & EDGE_ABNORMAL)
1806 n_abnormal++;
1809 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1810 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1812 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1813 err = 1;
1815 if (n_branch
1816 && (!JUMP_P (BB_END (bb))
1817 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1818 || any_condjump_p (BB_END (bb))))))
1820 error ("too many outgoing branch edges from bb %i", bb->index);
1821 err = 1;
1823 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1825 error ("fallthru edge after unconditional jump %i", bb->index);
1826 err = 1;
1828 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1830 error ("wrong amount of branch edges after unconditional jump %i", bb->index);
1831 err = 1;
1833 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1834 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1836 error ("wrong amount of branch edges after conditional jump %i",
1837 bb->index);
1838 err = 1;
1840 if (n_call && !CALL_P (BB_END (bb)))
1842 error ("call edges for non-call insn in bb %i", bb->index);
1843 err = 1;
1845 if (n_abnormal
1846 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1847 && (!JUMP_P (BB_END (bb))
1848 || any_condjump_p (BB_END (bb))
1849 || any_uncondjump_p (BB_END (bb))))
1851 error ("abnormal edges for no purpose in bb %i", bb->index);
1852 err = 1;
1855 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1856 /* We may have a barrier inside a basic block before dead code
1857 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1858 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1860 debug_rtx (x);
1861 if (! BLOCK_FOR_INSN (x))
1862 error
1863 ("insn %d inside basic block %d but block_for_insn is NULL",
1864 INSN_UID (x), bb->index);
1865 else
1866 error
1867 ("insn %d inside basic block %d but block_for_insn is %i",
1868 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1870 err = 1;
1873 /* OK pointers are correct. Now check the header of basic
1874 block. It ought to contain optional CODE_LABEL followed
1875 by NOTE_BASIC_BLOCK. */
1876 x = BB_HEAD (bb);
1877 if (LABEL_P (x))
1879 if (BB_END (bb) == x)
1881 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1882 bb->index);
1883 err = 1;
1886 x = NEXT_INSN (x);
1889 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1891 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1892 bb->index);
1893 err = 1;
1896 if (BB_END (bb) == x)
1897 /* Do checks for empty blocks here. */
1899 else
1900 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1902 if (NOTE_INSN_BASIC_BLOCK_P (x))
1904 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1905 INSN_UID (x), bb->index);
1906 err = 1;
1909 if (x == BB_END (bb))
1910 break;
1912 if (control_flow_insn_p (x))
1914 error ("in basic block %d:", bb->index);
1915 fatal_insn ("flow control insn inside a basic block", x);
1920 /* Clean up. */
1921 return err;
1924 /* Verify the CFG and RTL consistency common for both underlying RTL and
1925 cfglayout RTL.
1927 Currently it does the following checks:
1928 - all checks of rtl_verify_flow_info_1
1929 - test head/end pointers
1930 - check that all insns are in the basic blocks
1931 (except the switch handling code, barriers and notes)
1932 - check that all returns are followed by barriers
1933 - check that all fallthru edges point to adjacent blocks. */
1935 static int
1936 rtl_verify_flow_info (void)
1938 basic_block bb;
1939 int err = rtl_verify_flow_info_1 ();
1940 rtx x;
1941 rtx last_head = get_last_insn ();
1942 basic_block *bb_info;
1943 int num_bb_notes;
1944 const rtx rtx_first = get_insns ();
1945 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1946 const int max_uid = get_max_uid ();
1948 bb_info = XCNEWVEC (basic_block, max_uid);
1950 FOR_EACH_BB_REVERSE (bb)
1952 edge e;
1953 edge_iterator ei;
1954 rtx head = BB_HEAD (bb);
1955 rtx end = BB_END (bb);
1957 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1959 /* Verify the end of the basic block is in the INSN chain. */
1960 if (x == end)
1961 break;
1963 /* And that the code outside of basic blocks has NULL bb field. */
1964 if (!BARRIER_P (x)
1965 && BLOCK_FOR_INSN (x) != NULL)
1967 error ("insn %d outside of basic blocks has non-NULL bb field",
1968 INSN_UID (x));
1969 err = 1;
1973 if (!x)
1975 error ("end insn %d for block %d not found in the insn stream",
1976 INSN_UID (end), bb->index);
1977 err = 1;
1980 /* Work backwards from the end to the head of the basic block
1981 to verify the head is in the RTL chain. */
1982 for (; x != NULL_RTX; x = PREV_INSN (x))
1984 /* While walking over the insn chain, verify insns appear
1985 in only one basic block. */
1986 if (bb_info[INSN_UID (x)] != NULL)
1988 error ("insn %d is in multiple basic blocks (%d and %d)",
1989 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1990 err = 1;
1993 bb_info[INSN_UID (x)] = bb;
1995 if (x == head)
1996 break;
1998 if (!x)
2000 error ("head insn %d for block %d not found in the insn stream",
2001 INSN_UID (head), bb->index);
2002 err = 1;
2005 last_head = PREV_INSN (x);
2007 FOR_EACH_EDGE (e, ei, bb->succs)
2008 if (e->flags & EDGE_FALLTHRU)
2009 break;
2010 if (!e)
2012 rtx insn;
2014 /* Ensure existence of barrier in BB with no fallthru edges. */
2015 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
2016 insn = NEXT_INSN (insn))
2017 if (!insn
2018 || NOTE_INSN_BASIC_BLOCK_P (insn))
2020 error ("missing barrier after block %i", bb->index);
2021 err = 1;
2022 break;
2025 else if (e->src != ENTRY_BLOCK_PTR
2026 && e->dest != EXIT_BLOCK_PTR)
2028 rtx insn;
2030 if (e->src->next_bb != e->dest)
2032 error
2033 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2034 e->src->index, e->dest->index);
2035 err = 1;
2037 else
2038 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2039 insn = NEXT_INSN (insn))
2040 if (BARRIER_P (insn) || INSN_P (insn))
2042 error ("verify_flow_info: Incorrect fallthru %i->%i",
2043 e->src->index, e->dest->index);
2044 fatal_insn ("wrong insn in the fallthru edge", insn);
2045 err = 1;
2050 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2052 /* Check that the code before the first basic block has NULL
2053 bb field. */
2054 if (!BARRIER_P (x)
2055 && BLOCK_FOR_INSN (x) != NULL)
2057 error ("insn %d outside of basic blocks has non-NULL bb field",
2058 INSN_UID (x));
2059 err = 1;
2062 free (bb_info);
2064 num_bb_notes = 0;
2065 last_bb_seen = ENTRY_BLOCK_PTR;
2067 for (x = rtx_first; x; x = NEXT_INSN (x))
2069 if (NOTE_INSN_BASIC_BLOCK_P (x))
2071 bb = NOTE_BASIC_BLOCK (x);
2073 num_bb_notes++;
2074 if (bb != last_bb_seen->next_bb)
2075 internal_error ("basic blocks not laid down consecutively");
2077 curr_bb = last_bb_seen = bb;
2080 if (!curr_bb)
2082 switch (GET_CODE (x))
2084 case BARRIER:
2085 case NOTE:
2086 break;
2088 case CODE_LABEL:
2089 /* An addr_vec is placed outside any basic block. */
2090 if (NEXT_INSN (x)
2091 && JUMP_P (NEXT_INSN (x))
2092 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2093 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2094 x = NEXT_INSN (x);
2096 /* But in any case, non-deletable labels can appear anywhere. */
2097 break;
2099 default:
2100 fatal_insn ("insn outside basic block", x);
2104 if (JUMP_P (x)
2105 && returnjump_p (x) && ! condjump_p (x)
2106 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2107 fatal_insn ("return not followed by barrier", x);
2108 if (curr_bb && x == BB_END (curr_bb))
2109 curr_bb = NULL;
2112 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2113 internal_error
2114 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2115 num_bb_notes, n_basic_blocks);
2117 return err;
2120 /* Assume that the preceding pass has possibly eliminated jump instructions
2121 or converted the unconditional jumps. Eliminate the edges from CFG.
2122 Return true if any edges are eliminated. */
2124 bool
2125 purge_dead_edges (basic_block bb)
2127 edge e;
2128 rtx insn = BB_END (bb), note;
2129 bool purged = false;
2130 bool found;
2131 edge_iterator ei;
2133 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2134 if (NONJUMP_INSN_P (insn)
2135 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2137 rtx eqnote;
2139 if (! may_trap_p (PATTERN (insn))
2140 || ((eqnote = find_reg_equal_equiv_note (insn))
2141 && ! may_trap_p (XEXP (eqnote, 0))))
2142 remove_note (insn, note);
2145 /* Clean up abnormal edges caused by exceptions or non-local gotos.  */
2146 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2148 /* There are three types of edges we need to handle correctly here: EH
2149 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2150 latter can appear when nonlocal gotos are used. */
2151 if (e->flags & EDGE_EH)
2153 if (can_throw_internal (BB_END (bb))
2154 /* If this is a call edge, verify that this is a call insn. */
2155 && (! (e->flags & EDGE_ABNORMAL_CALL)
2156 || CALL_P (BB_END (bb))))
2158 ei_next (&ei);
2159 continue;
2162 else if (e->flags & EDGE_ABNORMAL_CALL)
2164 if (CALL_P (BB_END (bb))
2165 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2166 || INTVAL (XEXP (note, 0)) >= 0))
2168 ei_next (&ei);
2169 continue;
2172 else
2174 ei_next (&ei);
2175 continue;
2178 remove_edge (e);
2179 df_set_bb_dirty (bb);
2180 purged = true;
2183 if (JUMP_P (insn))
2185 rtx note;
2186 edge b,f;
2187 edge_iterator ei;
2189 /* We only care about conditional jumps, return jumps and simplejumps.  */
2190 if (!any_condjump_p (insn)
2191 && !returnjump_p (insn)
2192 && !simplejump_p (insn))
2193 return purged;
2195 /* Branch probability/prediction notes are defined only for
2196 condjumps.  We may have turned a condjump into a simplejump.  */
2197 if (simplejump_p (insn))
2199 note = find_reg_note (insn, REG_BR_PROB, NULL);
2200 if (note)
2201 remove_note (insn, note);
2202 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2203 remove_note (insn, note);
2206 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2208 /* Avoid letting abnormal flags leak from computed jumps turned
2209 into simplejumps.  */
2211 e->flags &= ~EDGE_ABNORMAL;
2213 /* See if this edge is one we should keep. */
2214 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2215 /* A conditional jump can fall through into the next
2216 block, so we should keep the edge. */
2218 ei_next (&ei);
2219 continue;
2221 else if (e->dest != EXIT_BLOCK_PTR
2222 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2223 /* If the destination block is the target of the jump,
2224 keep the edge. */
2226 ei_next (&ei);
2227 continue;
2229 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2230 /* If the destination block is the exit block, and this
2231 instruction is a return, then keep the edge. */
2233 ei_next (&ei);
2234 continue;
2236 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2237 /* Keep the edges that correspond to exceptions thrown by
2238 this instruction and rematerialize the EDGE_ABNORMAL
2239 flag we just cleared above. */
2241 e->flags |= EDGE_ABNORMAL;
2242 ei_next (&ei);
2243 continue;
2246 /* We do not need this edge. */
2247 df_set_bb_dirty (bb);
2248 purged = true;
2249 remove_edge (e);
2252 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2253 return purged;
2255 if (dump_file)
2256 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2258 if (!optimize)
2259 return purged;
2261 /* Redistribute probabilities. */
2262 if (single_succ_p (bb))
2264 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2265 single_succ_edge (bb)->count = bb->count;
2267 else
2269 note = find_reg_note (insn, REG_BR_PROB, NULL);
2270 if (!note)
2271 return purged;
2273 b = BRANCH_EDGE (bb);
2274 f = FALLTHRU_EDGE (bb);
2275 b->probability = INTVAL (XEXP (note, 0));
2276 f->probability = REG_BR_PROB_BASE - b->probability;
2277 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2278 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
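/* For instance, assuming the usual REG_BR_PROB_BASE of 10000, a
   REG_BR_PROB note value of 7500 gives the branch edge a 75% probability
   and the fallthru edge the remaining 25%; with bb->count == 1000 this
   yields b->count == 750 and f->count == 250.  */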
2281 return purged;
2283 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2285 /* First, there should not be any EH or ABCALL edges resulting
2286 from non-local gotos and the like. If there were, we shouldn't
2287 have created the sibcall in the first place. Second, there
2288 should of course never have been a fallthru edge. */
2289 gcc_assert (single_succ_p (bb));
2290 gcc_assert (single_succ_edge (bb)->flags
2291 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2293 return 0;
2296 /* If we don't see a jump insn, we don't know exactly why the block would
2297 have been broken at this point. Look for a simple, non-fallthru edge,
2298 as these are only created by conditional branches. If we find such an
2299 edge we know that there used to be a jump here and can then safely
2300 remove all non-fallthru edges. */
2301 found = false;
2302 FOR_EACH_EDGE (e, ei, bb->succs)
2303 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2305 found = true;
2306 break;
2309 if (!found)
2310 return purged;
2312 /* Remove all but the fake and fallthru edges. The fake edge may be
2313 the only successor for this block in the case of noreturn
2314 calls. */
2315 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2317 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2319 df_set_bb_dirty (bb);
2320 remove_edge (e);
2321 purged = true;
2323 else
2324 ei_next (&ei);
2327 gcc_assert (single_succ_p (bb));
2329 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2330 single_succ_edge (bb)->count = bb->count;
2332 if (dump_file)
2333 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2334 bb->index);
2335 return purged;
2338 /* Search all basic blocks for potentially dead edges and purge them. Return
2339 true if some edge has been eliminated. */
2341 bool
2342 purge_all_dead_edges (void)
2344 int purged = false;
2345 basic_block bb;
2347 FOR_EACH_BB (bb)
2349 bool purged_here = purge_dead_edges (bb);
2351 purged |= purged_here;
2354 return purged;
2357 /* Same as split_block but update cfg_layout structures. */
2359 static basic_block
2360 cfg_layout_split_block (basic_block bb, void *insnp)
2362 rtx insn = (rtx) insnp;
2363 basic_block new_bb = rtl_split_block (bb, insn);
2365 new_bb->il.rtl->footer = bb->il.rtl->footer;
2366 bb->il.rtl->footer = NULL;
2368 return new_bb;
2371 /* Redirect edge E to DEST.  */
2372 static edge
2373 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2375 basic_block src = e->src;
2376 edge ret;
2378 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2379 return NULL;
2381 if (e->dest == dest)
2382 return e;
2384 if (e->src != ENTRY_BLOCK_PTR
2385 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2387 df_set_bb_dirty (src);
2388 return ret;
2391 if (e->src == ENTRY_BLOCK_PTR
2392 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2394 if (dump_file)
2395 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2396 e->src->index, dest->index);
2398 df_set_bb_dirty (e->src);
2399 redirect_edge_succ (e, dest);
2400 return e;
2403 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2404 edge when the basic blocks appear to be in sequence.  Avoid this
2405 transformation.  */
2407 if (e->flags & EDGE_FALLTHRU)
2409 /* Redirect any branch edges unified with the fallthru one. */
2410 if (JUMP_P (BB_END (src))
2411 && label_is_jump_target_p (BB_HEAD (e->dest),
2412 BB_END (src)))
2414 edge redirected;
2416 if (dump_file)
2417 fprintf (dump_file, "Fallthru edge unified with branch "
2418 "%i->%i redirected to %i\n",
2419 e->src->index, e->dest->index, dest->index);
2420 e->flags &= ~EDGE_FALLTHRU;
2421 redirected = redirect_branch_edge (e, dest);
2422 gcc_assert (redirected);
2423 e->flags |= EDGE_FALLTHRU;
2424 df_set_bb_dirty (e->src);
2425 return e;
2427 /* If we are redirecting the fallthru edge to the target of the branch
2428 edge of a conditional jump, the jump becomes redundant; remove it.  */
2429 if (EDGE_COUNT (src->succs) == 2)
2431 /* Find the edge that is different from E. */
2432 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2434 if (s->dest == dest
2435 && any_condjump_p (BB_END (src))
2436 && onlyjump_p (BB_END (src)))
2437 delete_insn (BB_END (src));
2439 ret = redirect_edge_succ_nodup (e, dest);
2440 if (dump_file)
2441 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2442 e->src->index, e->dest->index, dest->index);
2444 else
2445 ret = redirect_branch_edge (e, dest);
2447 /* We don't want simplejumps in the insn stream during cfglayout. */
2448 gcc_assert (!simplejump_p (BB_END (src)));
2450 df_set_bb_dirty (src);
2451 return ret;
2454 /* Simple wrapper, as we can always redirect fallthru edges in cfglayout mode.  */
2455 static basic_block
2456 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2458 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2460 gcc_assert (redirected);
2461 return NULL;
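/* Returning NULL indicates that no new forwarder block had to be
   created; in cfglayout mode the redirection above is expected to
   always succeed without splitting the edge.  */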
2464 /* Same as delete_basic_block but update cfg_layout structures. */
2466 static void
2467 cfg_layout_delete_block (basic_block bb)
2469 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaining;
2471 if (bb->il.rtl->header)
2473 next = BB_HEAD (bb);
2474 if (prev)
2475 NEXT_INSN (prev) = bb->il.rtl->header;
2476 else
2477 set_first_insn (bb->il.rtl->header);
2478 PREV_INSN (bb->il.rtl->header) = prev;
2479 insn = bb->il.rtl->header;
2480 while (NEXT_INSN (insn))
2481 insn = NEXT_INSN (insn);
2482 NEXT_INSN (insn) = next;
2483 PREV_INSN (next) = insn;
2485 next = NEXT_INSN (BB_END (bb));
2486 if (bb->il.rtl->footer)
2488 insn = bb->il.rtl->footer;
2489 while (insn)
2491 if (BARRIER_P (insn))
2493 if (PREV_INSN (insn))
2494 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2495 else
2496 bb->il.rtl->footer = NEXT_INSN (insn);
2497 if (NEXT_INSN (insn))
2498 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2500 if (LABEL_P (insn))
2501 break;
2502 insn = NEXT_INSN (insn);
2504 if (bb->il.rtl->footer)
2506 insn = BB_END (bb);
2507 NEXT_INSN (insn) = bb->il.rtl->footer;
2508 PREV_INSN (bb->il.rtl->footer) = insn;
2509 while (NEXT_INSN (insn))
2510 insn = NEXT_INSN (insn);
2511 NEXT_INSN (insn) = next;
2512 if (next)
2513 PREV_INSN (next) = insn;
2514 else
2515 set_last_insn (insn);
2518 if (bb->next_bb != EXIT_BLOCK_PTR)
2519 to = &bb->next_bb->il.rtl->header;
2520 else
2521 to = &cfg_layout_function_footer;
2523 rtl_delete_block (bb);
2525 if (prev)
2526 prev = NEXT_INSN (prev);
2527 else
2528 prev = get_insns ();
2529 if (next)
2530 next = PREV_INSN (next);
2531 else
2532 next = get_last_insn ();
2534 if (next && NEXT_INSN (next) != prev)
2536 remaining = unlink_insn_chain (prev, next);
2537 insn = remaining;
2538 while (NEXT_INSN (insn))
2539 insn = NEXT_INSN (insn);
2540 NEXT_INSN (insn) = *to;
2541 if (*to)
2542 PREV_INSN (*to) = insn;
2543 *to = remaining;
2547 /* Return true when blocks A and B can be safely merged. */
2548 static bool
2549 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2551 /* If we are partitioning hot/cold basic blocks, we don't want to
2552 mess up unconditional or indirect jumps that cross between hot
2553 and cold sections.
2555 Basic block partitioning may result in some jumps that appear to
2556 be optimizable (or blocks that appear to be mergeable), but which really
2557 must be left untouched (they are required to make it safely across
2558 partition boundaries). See the comments at the top of
2559 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2561 if (BB_PARTITION (a) != BB_PARTITION (b))
2562 return false;
2564 /* There must be exactly one edge in between the blocks. */
2565 return (single_succ_p (a)
2566 && single_succ (a) == b
2567 && single_pred_p (b)
2568 && a != b
2569 /* Must be simple edge. */
2570 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2571 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2572 /* If the jump insn has side effects, we can't kill the edge.
2573 When not optimizing, try_redirect_by_replacing_jump will
2574 not allow us to redirect an edge by replacing a table jump. */
2575 && (!JUMP_P (BB_END (a))
2576 || ((!optimize || reload_completed)
2577 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2580 /* Merge blocks A and B.  The blocks must be mergeable.  */
2582 static void
2583 cfg_layout_merge_blocks (basic_block a, basic_block b)
2585 #ifdef ENABLE_CHECKING
2586 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2587 #endif
2589 if (dump_file)
2590 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
2592 /* If there was a CODE_LABEL beginning B, delete it. */
2593 if (LABEL_P (BB_HEAD (b)))
2595 /* This might have been an EH label that no longer has incoming
2596 EH edges. Update data structures to match. */
2597 maybe_remove_eh_handler (BB_HEAD (b));
2599 delete_insn (BB_HEAD (b));
2602 /* We should have a fallthru edge in A; if we do not, do a dummy
2603 redirection to get it cleaned up.  */
2604 if (JUMP_P (BB_END (a)))
2605 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2606 gcc_assert (!JUMP_P (BB_END (a)));
2608 /* Possible line number notes should appear in between. */
2609 if (b->il.rtl->header)
2611 rtx first = BB_END (a), last;
2613 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2614 delete_insn_chain (NEXT_INSN (first), last, false);
2615 b->il.rtl->header = NULL;
2618 /* If the basic blocks are not adjacent, move B's instructions after A's.  */
2619 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2621 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2623 emit_insn_after_noloc (first, BB_END (a), a);
2624 /* Skip possible DELETED_LABEL insn. */
2625 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2626 first = NEXT_INSN (first);
2627 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2628 BB_HEAD (b) = NULL;
2629 delete_insn (first);
2631 /* Otherwise just re-associate the instructions. */
2632 else
2634 rtx insn;
2636 for (insn = BB_HEAD (b);
2637 insn != NEXT_INSN (BB_END (b));
2638 insn = NEXT_INSN (insn))
2640 set_block_for_insn (insn, a);
2641 df_insn_change_bb (insn);
2644 insn = BB_HEAD (b);
2645 /* Skip possible DELETED_LABEL insn. */
2646 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2647 insn = NEXT_INSN (insn);
2648 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2649 BB_HEAD (b) = NULL;
2650 BB_END (a) = BB_END (b);
2651 delete_insn (insn);
2654 df_bb_delete (b->index);
2656 /* Possible tablejumps and barriers should appear after the block. */
2657 if (b->il.rtl->footer)
2659 if (!a->il.rtl->footer)
2660 a->il.rtl->footer = b->il.rtl->footer;
2661 else
2663 rtx last = a->il.rtl->footer;
2665 while (NEXT_INSN (last))
2666 last = NEXT_INSN (last);
2667 NEXT_INSN (last) = b->il.rtl->footer;
2668 PREV_INSN (b->il.rtl->footer) = last;
2670 b->il.rtl->footer = NULL;
2673 if (dump_file)
2674 fprintf (dump_file, "Merged blocks %d and %d.\n",
2675 a->index, b->index);
2678 /* Split edge E. */
2680 static basic_block
2681 cfg_layout_split_edge (edge e)
2683 basic_block new_bb =
2684 create_basic_block (e->src != ENTRY_BLOCK_PTR
2685 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2686 NULL_RTX, e->src);
2688 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2689 redirect_edge_and_branch_force (e, new_bb);
2691 return new_bb;
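/* The new block is created right after E->src in the insn stream (or at
   the start of the function when E leaves the entry block), falls
   through to the old destination, and E itself is then forced to target
   the new block.  */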
2694 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2696 static void
2697 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2701 /* Return true if BB ends with a call, possibly followed by some
2702 instructions that must stay with the call; false otherwise.  */
2704 static bool
2705 rtl_block_ends_with_call_p (basic_block bb)
2707 rtx insn = BB_END (bb);
2709 while (!CALL_P (insn)
2710 && insn != BB_HEAD (bb)
2711 && keep_with_call_p (insn))
2712 insn = PREV_INSN (insn);
2713 return (CALL_P (insn));
2716 /* Return true if BB ends with a conditional branch, false otherwise.  */
2718 static bool
2719 rtl_block_ends_with_condjump_p (basic_block bb)
2721 return any_condjump_p (BB_END (bb));
2724 /* Return true if we need to add a fake edge from INSN to the exit block.
2725 Helper function for rtl_flow_call_edges_add.  */
2727 static bool
2728 need_fake_edge_p (rtx insn)
2730 if (!INSN_P (insn))
2731 return false;
2733 if ((CALL_P (insn)
2734 && !SIBLING_CALL_P (insn)
2735 && !find_reg_note (insn, REG_NORETURN, NULL)
2736 && !CONST_OR_PURE_CALL_P (insn)))
2737 return true;
2739 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2740 && MEM_VOLATILE_P (PATTERN (insn)))
2741 || (GET_CODE (PATTERN (insn)) == PARALLEL
2742 && asm_noperands (insn) != -1
2743 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2744 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2747 /* Add fake edges to the function exit for any non-constant and non-noreturn
2748 calls, and for volatile inline assembly, in the bitmap of blocks specified
2749 by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
2750 blocks that were split.
2752 The goal is to expose cases in which entering a basic block does not imply
2753 that all subsequent instructions must be executed. */
2755 static int
2756 rtl_flow_call_edges_add (sbitmap blocks)
2758 int i;
2759 int blocks_split = 0;
2760 int last_bb = last_basic_block;
2761 bool check_last_block = false;
2763 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2764 return 0;
2766 if (! blocks)
2767 check_last_block = true;
2768 else
2769 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2771 /* In the last basic block, before epilogue generation, there will be
2772 a fallthru edge to EXIT. Special care is required if the last insn
2773 of the last basic block is a call because make_edge folds duplicate
2774 edges, which would result in the fallthru edge also being marked
2775 fake, which would result in the fallthru edge being removed by
2776 remove_fake_edges, which would result in an invalid CFG.
2778 Moreover, we can't elide the outgoing fake edge, since the block
2779 profiler needs to take this into account in order to solve the minimal
2780 spanning tree in the case that the call doesn't return.
2782 Handle this by adding a dummy instruction in a new last basic block. */
2783 if (check_last_block)
2785 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2786 rtx insn = BB_END (bb);
2788 /* Back up past insns that must be kept in the same block as a call. */
2789 while (insn != BB_HEAD (bb)
2790 && keep_with_call_p (insn))
2791 insn = PREV_INSN (insn);
2793 if (need_fake_edge_p (insn))
2795 edge e;
2797 e = find_edge (bb, EXIT_BLOCK_PTR);
2798 if (e)
2800 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2801 commit_edge_insertions ();
2806 /* Now add fake edges to the function exit for any non-constant
2807 calls, since there is no way that we can determine whether they
2808 will return or not...  */
2810 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2812 basic_block bb = BASIC_BLOCK (i);
2813 rtx insn;
2814 rtx prev_insn;
2816 if (!bb)
2817 continue;
2819 if (blocks && !TEST_BIT (blocks, i))
2820 continue;
2822 for (insn = BB_END (bb); ; insn = prev_insn)
2824 prev_insn = PREV_INSN (insn);
2825 if (need_fake_edge_p (insn))
2827 edge e;
2828 rtx split_at_insn = insn;
2830 /* Don't split the block between a call and an insn that should
2831 remain in the same block as the call. */
2832 if (CALL_P (insn))
2833 while (split_at_insn != BB_END (bb)
2834 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2835 split_at_insn = NEXT_INSN (split_at_insn);
2837 /* The handling above of the final block before the epilogue
2838 should be enough to verify that there is no edge to the exit
2839 block in CFG already. Calling make_edge in such case would
2840 cause us to mark that edge as fake and remove it later. */
2842 #ifdef ENABLE_CHECKING
2843 if (split_at_insn == BB_END (bb))
2845 e = find_edge (bb, EXIT_BLOCK_PTR);
2846 gcc_assert (e == NULL);
2848 #endif
2850 /* Note that the following may create a new basic block
2851 and renumber the existing basic blocks. */
2852 if (split_at_insn != BB_END (bb))
2854 e = split_block (bb, split_at_insn);
2855 if (e)
2856 blocks_split++;
2859 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2862 if (insn == BB_HEAD (bb))
2863 break;
2867 if (blocks_split)
2868 verify_flow_info ();
2870 return blocks_split;
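/* The fake edges added here are meant to be temporary; callers of this
   hook typically drop them again with remove_fake_edges once the
   analysis that needed them (such as the block profiling mentioned
   above) is finished.  */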
2873 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
2874 the conditional branch target and SECOND_HEAD is the fall-thru block.
2875 There is no need to handle SECOND_HEAD here, since the loop versioning
2876 code takes care of it; it is part of the signature only because the
2877 tree-level hook needs it and both hooks must have the same type.  */
2878 static void
2879 rtl_lv_add_condition_to_bb (basic_block first_head ,
2880 basic_block second_head ATTRIBUTE_UNUSED,
2881 basic_block cond_bb, void *comp_rtx)
2883 rtx label, seq, jump;
2884 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2885 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2886 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2887 enum machine_mode mode;
2890 label = block_label (first_head);
2891 mode = GET_MODE (op0);
2892 if (mode == VOIDmode)
2893 mode = GET_MODE (op1);
2895 start_sequence ();
2896 op0 = force_operand (op0, NULL_RTX);
2897 op1 = force_operand (op1, NULL_RTX);
2898 do_compare_rtx_and_jump (op0, op1, comp, 0,
2899 mode, NULL_RTX, NULL_RTX, label);
2900 jump = get_last_insn ();
2901 JUMP_LABEL (jump) = label;
2902 LABEL_NUSES (label)++;
2903 seq = get_insns ();
2904 end_sequence ();
2906 /* Add the new condition at the end of COND_BB.  */
2907 emit_insn_after (seq, BB_END (cond_bb));
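/* The sequence emitted above forces OP0 and OP1 into operands, compares
   them with COMP and ends with a conditional jump to the label of
   FIRST_HEAD; when the comparison is false, control falls through into
   SECOND_HEAD.  */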
2911 /* Given a block B with a conditional branch at its end, store the
2912 branch edge and the fall-thru edge in *BRANCH_EDGE and
2913 *FALLTHRU_EDGE respectively.  */
2914 static void
2915 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2916 edge *fallthru_edge)
2918 edge e = EDGE_SUCC (b, 0);
2920 if (e->flags & EDGE_FALLTHRU)
2922 *fallthru_edge = e;
2923 *branch_edge = EDGE_SUCC (b, 1);
2925 else
2927 *branch_edge = e;
2928 *fallthru_edge = EDGE_SUCC (b, 1);
2932 void
2933 init_rtl_bb_info (basic_block bb)
2935 gcc_assert (!bb->il.rtl);
2936 bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
2940 /* Add the instruction pattern PAT to the end of basic block BB and
2941 return the newly emitted insn.  */
2942 rtx
2943 insert_insn_end_bb_new (rtx pat, basic_block bb)
2945 rtx insn = BB_END (bb);
2946 rtx new_insn;
2947 rtx pat_end = pat;
2949 while (NEXT_INSN (pat_end) != NULL_RTX)
2950 pat_end = NEXT_INSN (pat_end);
2952 /* If the last insn is a jump, insert PAT in front of it [taking care to
2953 handle cc0, etc. properly].  Similarly we need to handle trapping
2954 instructions in the presence of non-call exceptions.  */
2956 if (JUMP_P (insn)
2957 || (NONJUMP_INSN_P (insn)
2958 && (!single_succ_p (bb)
2959 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2961 #ifdef HAVE_cc0
2962 rtx note;
2963 #endif
2964 /* If this is a jump table, then we can't insert stuff here. Since
2965 we know the previous real insn must be the tablejump, we insert
2966 the new instruction just before the tablejump. */
2967 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2968 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2969 insn = prev_real_insn (insn);
2971 #ifdef HAVE_cc0
2972 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2973 if cc0 isn't set. */
2974 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2975 if (note)
2976 insn = XEXP (note, 0);
2977 else
2979 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2980 if (maybe_cc0_setter
2981 && INSN_P (maybe_cc0_setter)
2982 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2983 insn = maybe_cc0_setter;
2985 #endif
2986 /* FIXME: What if something in cc0/jump uses value set in new
2987 insn? */
2988 new_insn = emit_insn_before_noloc (pat, insn, bb);
2991 /* Likewise if the last insn is a call, as will happen in the presence
2992 of exception handling. */
2993 else if (CALL_P (insn)
2994 && (!single_succ_p (bb)
2995 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2997 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2998 we search backward and place the instructions before the first
2999 parameter is loaded. Do this for everyone for consistency and a
3000 presumption that we'll get better code elsewhere as well. */
3002 /* Since different machines initialize their parameter registers
3003 in different orders, assume nothing. Collect the set of all
3004 parameter registers. */
3005 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3007 /* If we found all the parameter loads, then we want to insert
3008 before the first parameter load.
3010 If we did not find all the parameter loads, then we might have
3011 stopped on the head of the block, which could be a CODE_LABEL.
3012 If we inserted before the CODE_LABEL, then we would be putting
3013 the insn in the wrong basic block. In that case, put the insn
3014 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3015 while (LABEL_P (insn)
3016 || NOTE_INSN_BASIC_BLOCK_P (insn))
3017 insn = NEXT_INSN (insn);
3019 new_insn = emit_insn_before_noloc (pat, insn, bb);
3021 else
3022 new_insn = emit_insn_after_noloc (pat, insn, bb);
3024 return new_insn;
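/* A minimal usage sketch (hypothetical caller; DEST and SRC stand for
   register rtxes the caller already has):

     rtx pat = gen_move_insn (dest, src);
     rtx copy = insert_insn_end_bb_new (pat, bb);

   When BB ends in a jump, or in a call whose successors are not a single
   normal edge, the helper moves the insertion point backwards so the new
   insn does not disturb the control transfer; otherwise PAT is simply
   appended after the current BB_END.  */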
3027 /* Returns true if it is possible to remove edge E by redirecting
3028 it to the destination of the other edge from E->src. */
3030 static bool
3031 rtl_can_remove_branch_p (edge e)
3033 basic_block src = e->src;
3034 basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3035 rtx insn = BB_END (src), set;
3037 /* The conditions are taken from try_redirect_by_replacing_jump. */
3038 if (target == EXIT_BLOCK_PTR)
3039 return false;
3041 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3042 return false;
3044 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3045 || BB_PARTITION (src) != BB_PARTITION (target))
3046 return false;
3048 if (!onlyjump_p (insn)
3049 || tablejump_p (insn, NULL, NULL))
3050 return false;
3052 set = single_set (insn);
3053 if (!set || side_effects_p (set))
3054 return false;
3056 return true;
3059 /* Implementation of CFG manipulation for linearized RTL. */
3060 struct cfg_hooks rtl_cfg_hooks = {
3061 "rtl",
3062 rtl_verify_flow_info,
3063 rtl_dump_bb,
3064 rtl_create_basic_block,
3065 rtl_redirect_edge_and_branch,
3066 rtl_redirect_edge_and_branch_force,
3067 rtl_can_remove_branch_p,
3068 rtl_delete_block,
3069 rtl_split_block,
3070 rtl_move_block_after,
3071 rtl_can_merge_blocks, /* can_merge_blocks_p */
3072 rtl_merge_blocks,
3073 rtl_predict_edge,
3074 rtl_predicted_by_p,
3075 NULL, /* can_duplicate_block_p */
3076 NULL, /* duplicate_block */
3077 rtl_split_edge,
3078 rtl_make_forwarder_block,
3079 rtl_tidy_fallthru_edge,
3080 rtl_block_ends_with_call_p,
3081 rtl_block_ends_with_condjump_p,
3082 rtl_flow_call_edges_add,
3083 NULL, /* execute_on_growing_pred */
3084 NULL, /* execute_on_shrinking_pred */
3085 NULL, /* duplicate loop for trees */
3086 NULL, /* lv_add_condition_to_bb */
3087 NULL, /* lv_adjust_loop_header_phi*/
3088 NULL, /* extract_cond_bb_edges */
3089 NULL /* flush_pending_stmts */
3092 /* Implementation of CFG manipulation for cfg layout RTL, where
3093 basic blocks connected via fallthru edges do not have to be adjacent.
3094 This representation will hopefully become the default one in a future
3095 version of the compiler.  */
3097 /* We do not want to declare these functions in a header file, since they
3098 should only be used through the cfghooks interface, and we do not want to
3099 move them here since it would require also moving quite a lot of related
3100 code. They are in cfglayout.c. */
3101 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
3102 extern basic_block cfg_layout_duplicate_bb (basic_block);
3104 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3105 "cfglayout mode",
3106 rtl_verify_flow_info_1,
3107 rtl_dump_bb,
3108 cfg_layout_create_basic_block,
3109 cfg_layout_redirect_edge_and_branch,
3110 cfg_layout_redirect_edge_and_branch_force,
3111 rtl_can_remove_branch_p,
3112 cfg_layout_delete_block,
3113 cfg_layout_split_block,
3114 rtl_move_block_after,
3115 cfg_layout_can_merge_blocks_p,
3116 cfg_layout_merge_blocks,
3117 rtl_predict_edge,
3118 rtl_predicted_by_p,
3119 cfg_layout_can_duplicate_bb_p,
3120 cfg_layout_duplicate_bb,
3121 cfg_layout_split_edge,
3122 rtl_make_forwarder_block,
3123 NULL, /* tidy_fallthru_edge */
3124 rtl_block_ends_with_call_p,
3125 rtl_block_ends_with_condjump_p,
3126 rtl_flow_call_edges_add,
3127 NULL, /* execute_on_growing_pred */
3128 NULL, /* execute_on_shrinking_pred */
3129 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3130 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3131 NULL, /* lv_adjust_loop_header_phi*/
3132 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3133 NULL /* flush_pending_stmts */