/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
42 #include "coretypes.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
51 #include "rtl-error.h"
54 #include "insn-attr.h"
55 #include "insn-config.h"
58 #include "common/common-target.h"
61 #include "tree-pass.h"
64 /* Holds the interesting leading and trailing notes for the function.
65 Only applicable if the CFG is in cfglayout mode. */
66 static GTY(()) rtx cfg_layout_function_footer
;
67 static GTY(()) rtx cfg_layout_function_header
;
69 static rtx
skip_insns_after_block (basic_block
);
70 static void record_effective_endpoints (void);
71 static rtx
label_for_bb (basic_block
);
72 static void fixup_reorder_chain (void);
74 void verify_insn_chain (void);
75 static void fixup_fallthru_exit_predecessor (void);
76 static int can_delete_note_p (const_rtx
);
77 static int can_delete_label_p (const_rtx
);
78 static basic_block
rtl_split_edge (edge
);
79 static bool rtl_move_block_after (basic_block
, basic_block
);
80 static int rtl_verify_flow_info (void);
81 static basic_block
cfg_layout_split_block (basic_block
, void *);
82 static edge
cfg_layout_redirect_edge_and_branch (edge
, basic_block
);
83 static basic_block
cfg_layout_redirect_edge_and_branch_force (edge
, basic_block
);
84 static void cfg_layout_delete_block (basic_block
);
85 static void rtl_delete_block (basic_block
);
86 static basic_block
rtl_redirect_edge_and_branch_force (edge
, basic_block
);
87 static edge
rtl_redirect_edge_and_branch (edge
, basic_block
);
88 static basic_block
rtl_split_block (basic_block
, void *);
89 static void rtl_dump_bb (FILE *, basic_block
, int, int);
90 static int rtl_verify_flow_info_1 (void);
91 static void rtl_make_forwarder_block (edge
);
93 /* Return true if NOTE is not one of the ones that must be kept paired,
94 so that we may simply delete it. */
97 can_delete_note_p (const_rtx note
)
99 switch (NOTE_KIND (note
))
101 case NOTE_INSN_DELETED
:
102 case NOTE_INSN_BASIC_BLOCK
:
103 case NOTE_INSN_EPILOGUE_BEG
:
111 /* True if a given label can be deleted. */
114 can_delete_label_p (const_rtx label
)
116 return (!LABEL_PRESERVE_P (label
)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label
) == 0
119 && !in_expr_list_p (forced_labels
, label
));
122 /* Delete INSN by patching it out. */
125 delete_insn (rtx insn
)
128 bool really_delete
= true;
132 /* Some labels can't be directly removed from the INSN chain, as they
133 might be references via variables, constant pool etc.
134 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
135 if (! can_delete_label_p (insn
))
137 const char *name
= LABEL_NAME (insn
);
138 basic_block bb
= BLOCK_FOR_INSN (insn
);
139 rtx bb_note
= NEXT_INSN (insn
);
141 really_delete
= false;
142 PUT_CODE (insn
, NOTE
);
143 NOTE_KIND (insn
) = NOTE_INSN_DELETED_LABEL
;
144 NOTE_DELETED_LABEL_NAME (insn
) = name
;
146 if (bb_note
!= NULL_RTX
&& NOTE_INSN_BASIC_BLOCK_P (bb_note
)
147 && BLOCK_FOR_INSN (bb_note
) == bb
)
149 reorder_insns_nobb (insn
, insn
, bb_note
);
150 BB_HEAD (bb
) = bb_note
;
151 if (BB_END (bb
) == bb_note
)
156 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
161 /* If this insn has already been deleted, something is very wrong. */
162 gcc_assert (!INSN_DELETED_P (insn
));
164 INSN_DELETED_P (insn
) = 1;
167 /* If deleting a jump, decrement the use count of the label. Deleting
168 the label itself should happen in the normal course of block merging. */
171 if (JUMP_LABEL (insn
)
172 && LABEL_P (JUMP_LABEL (insn
)))
173 LABEL_NUSES (JUMP_LABEL (insn
))--;
175 /* If there are more targets, remove them too. */
177 = find_reg_note (insn
, REG_LABEL_TARGET
, NULL_RTX
)) != NULL_RTX
178 && LABEL_P (XEXP (note
, 0)))
180 LABEL_NUSES (XEXP (note
, 0))--;
181 remove_note (insn
, note
);
185 /* Also if deleting any insn that references a label as an operand. */
186 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, NULL_RTX
)) != NULL_RTX
187 && LABEL_P (XEXP (note
, 0)))
189 LABEL_NUSES (XEXP (note
, 0))--;
190 remove_note (insn
, note
);
193 if (JUMP_TABLE_DATA_P (insn
))
195 rtx pat
= PATTERN (insn
);
196 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
197 int len
= XVECLEN (pat
, diff_vec_p
);
200 for (i
= 0; i
< len
; i
++)
202 rtx label
= XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0);
204 /* When deleting code in bulk (e.g. removing many unreachable
205 blocks) we can delete a label that's a target of the vector
206 before deleting the vector itself. */
208 LABEL_NUSES (label
)--;
213 /* Like delete_insn but also purge dead edges from BB. */
216 delete_insn_and_edges (rtx insn
)
221 && BLOCK_FOR_INSN (insn
)
222 && BB_END (BLOCK_FOR_INSN (insn
)) == insn
)
226 purge_dead_edges (BLOCK_FOR_INSN (insn
));
229 /* Unlink a chain of insns between START and FINISH, leaving notes
230 that must be paired. If CLEAR_BB is true, we set bb field for
231 insns that cannot be removed to NULL. */
234 delete_insn_chain (rtx start
, rtx finish
, bool clear_bb
)
238 /* Unchain the insns one by one. It would be quicker to delete all of these
239 with a single unchaining, rather than one at a time, but we need to keep
244 prev
= PREV_INSN (current
);
245 if (NOTE_P (current
) && !can_delete_note_p (current
))
248 delete_insn (current
);
250 if (clear_bb
&& !INSN_DELETED_P (current
))
251 set_block_for_insn (current
, NULL
);
253 if (current
== start
)
259 /* Create a new basic block consisting of the instructions between HEAD and END
260 inclusive. This function is designed to allow fast BB construction - reuses
261 the note and basic block struct in BB_NOTE, if any and do not grow
262 BASIC_BLOCK chain and should be used directly only by CFG construction code.
263 END can be NULL in to create new empty basic block before HEAD. Both END
264 and HEAD can be NULL to create basic block at the end of INSN chain.
265 AFTER is the basic block we should be put after. */
268 create_basic_block_structure (rtx head
, rtx end
, rtx bb_note
, basic_block after
)
273 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
276 /* If we found an existing note, thread it back onto the chain. */
284 after
= PREV_INSN (head
);
288 if (after
!= bb_note
&& NEXT_INSN (after
) != bb_note
)
289 reorder_insns_nobb (bb_note
, bb_note
, after
);
293 /* Otherwise we must create a note and a basic block structure. */
297 init_rtl_bb_info (bb
);
300 = emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
301 else if (LABEL_P (head
) && end
)
303 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
309 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
315 NOTE_BASIC_BLOCK (bb_note
) = bb
;
318 /* Always include the bb note in the block. */
319 if (NEXT_INSN (end
) == bb_note
)
324 bb
->index
= last_basic_block
++;
325 bb
->flags
= BB_NEW
| BB_RTL
;
326 link_block (bb
, after
);
327 SET_BASIC_BLOCK (bb
->index
, bb
);
328 df_bb_refs_record (bb
->index
, false);
329 update_bb_for_insn (bb
);
330 BB_SET_PARTITION (bb
, BB_UNPARTITIONED
);
332 /* Tag the block so that we know it has been used when considering
333 other basic block notes. */
339 /* Create new basic block consisting of instructions in between HEAD and END
340 and place it to the BB chain after block AFTER. END can be NULL to
341 create a new empty basic block before HEAD. Both END and HEAD can be
342 NULL to create basic block at the end of INSN chain. */
345 rtl_create_basic_block (void *headp
, void *endp
, basic_block after
)
347 rtx head
= (rtx
) headp
, end
= (rtx
) endp
;
350 /* Grow the basic block array if needed. */
351 if ((size_t) last_basic_block
>= basic_block_info
->length ())
353 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
354 vec_safe_grow_cleared (basic_block_info
, new_size
);
359 bb
= create_basic_block_structure (head
, end
, NULL
, after
);
365 cfg_layout_create_basic_block (void *head
, void *end
, basic_block after
)
367 basic_block newbb
= rtl_create_basic_block (head
, end
, after
);
372 /* Delete the insns in a (non-live) block. We physically delete every
373 non-deleted-note insn, and update the flow graph appropriately.
375 Return nonzero if we deleted an exception handler. */
377 /* ??? Preserving all such notes strikes me as wrong. It would be nice
378 to post-process the stream to remove empty blocks, loops, ranges, etc. */
381 rtl_delete_block (basic_block b
)
385 /* If the head of this block is a CODE_LABEL, then it might be the
386 label for an exception handler which can't be reached. We need
387 to remove the label from the exception_handler_label list. */
390 end
= get_last_bb_insn (b
);
392 /* Selectively delete the entire chain. */
394 delete_insn_chain (insn
, end
, true);
398 fprintf (dump_file
, "deleting block %d\n", b
->index
);
399 df_bb_delete (b
->index
);
402 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
405 compute_bb_for_insn (void)
411 rtx end
= BB_END (bb
);
414 for (insn
= BB_HEAD (bb
); ; insn
= NEXT_INSN (insn
))
416 BLOCK_FOR_INSN (insn
) = bb
;
423 /* Release the basic_block_for_insn array. */
426 free_bb_for_insn (void)
429 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
430 if (!BARRIER_P (insn
))
431 BLOCK_FOR_INSN (insn
) = NULL
;
/* Pass driver that drops the CFG and the per-insn basic block map.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
452 struct rtl_opt_pass pass_free_cfg
=
456 "*free_cfg", /* name */
457 OPTGROUP_NONE
, /* optinfo_flags */
459 rest_of_pass_free_cfg
, /* execute */
462 0, /* static_pass_number */
464 0, /* properties_required */
465 0, /* properties_provided */
466 PROP_cfg
, /* properties_destroyed */
467 0, /* todo_flags_start */
468 0, /* todo_flags_finish */
472 /* Return RTX to emit after when we want to emit code on the entry of function. */
474 entry_of_function (void)
476 return (n_basic_blocks
> NUM_FIXED_BLOCKS
?
477 BB_HEAD (ENTRY_BLOCK_PTR
->next_bb
) : get_insns ());
480 /* Emit INSN at the entry point of the function, ensuring that it is only
481 executed once per function. */
483 emit_insn_at_entry (rtx insn
)
485 edge_iterator ei
= ei_start (ENTRY_BLOCK_PTR
->succs
);
486 edge e
= ei_safe_edge (ei
);
487 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
489 insert_insn_on_edge (insn
, e
);
490 commit_edge_insertions ();
493 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
494 (or BARRIER if found) and notify df of the bb change.
495 The insn chain range is inclusive
496 (i.e. both BEGIN and END will be updated. */
499 update_bb_for_insn_chain (rtx begin
, rtx end
, basic_block bb
)
503 end
= NEXT_INSN (end
);
504 for (insn
= begin
; insn
!= end
; insn
= NEXT_INSN (insn
))
505 if (!BARRIER_P (insn
))
506 df_insn_change_bb (insn
, bb
);
509 /* Update BLOCK_FOR_INSN of insns in BB to BB,
510 and notify df of the change. */
513 update_bb_for_insn (basic_block bb
)
515 update_bb_for_insn_chain (BB_HEAD (bb
), BB_END (bb
), bb
);
519 /* Like active_insn_p, except keep the return value clobber around
520 even after reload. */
523 flow_active_insn_p (const_rtx insn
)
525 if (active_insn_p (insn
))
528 /* A clobber of the function return value exists for buggy
529 programs that fail to return a value. Its effect is to
530 keep the return value from being live across the entire
531 function. If we allow it to be skipped, we introduce the
532 possibility for register lifetime confusion. */
533 if (GET_CODE (PATTERN (insn
)) == CLOBBER
534 && REG_P (XEXP (PATTERN (insn
), 0))
535 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))
541 /* Return true if the block has no effect and only forwards control flow to
542 its single destination. */
545 contains_no_active_insn_p (const_basic_block bb
)
549 if (bb
== EXIT_BLOCK_PTR
|| bb
== ENTRY_BLOCK_PTR
550 || !single_succ_p (bb
))
553 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= NEXT_INSN (insn
))
554 if (INSN_P (insn
) && flow_active_insn_p (insn
))
557 return (!INSN_P (insn
)
558 || (JUMP_P (insn
) && simplejump_p (insn
))
559 || !flow_active_insn_p (insn
));
562 /* Likewise, but protect loop latches, headers and preheaders. */
563 /* FIXME: Make this a cfg hook. */
566 forwarder_block_p (const_basic_block bb
)
568 if (!contains_no_active_insn_p (bb
))
571 /* Protect loop latches, headers and preheaders. */
575 if (bb
->loop_father
->header
== bb
)
577 dest
= EDGE_SUCC (bb
, 0)->dest
;
578 if (dest
->loop_father
->header
== dest
)
585 /* Return nonzero if we can reach target from src by falling through. */
586 /* FIXME: Make this a cfg hook. */
589 can_fallthru (basic_block src
, basic_block target
)
591 rtx insn
= BB_END (src
);
596 if (target
== EXIT_BLOCK_PTR
)
598 if (src
->next_bb
!= target
)
600 FOR_EACH_EDGE (e
, ei
, src
->succs
)
601 if (e
->dest
== EXIT_BLOCK_PTR
602 && e
->flags
& EDGE_FALLTHRU
)
605 insn2
= BB_HEAD (target
);
606 if (insn2
&& !active_insn_p (insn2
))
607 insn2
= next_active_insn (insn2
);
609 /* ??? Later we may add code to move jump tables offline. */
610 return next_active_insn (insn
) == insn2
;
613 /* Return nonzero if we could reach target from src by falling through,
614 if the target was made adjacent. If we already have a fall-through
615 edge to the exit block, we can't do that. */
617 could_fall_through (basic_block src
, basic_block target
)
622 if (target
== EXIT_BLOCK_PTR
)
624 FOR_EACH_EDGE (e
, ei
, src
->succs
)
625 if (e
->dest
== EXIT_BLOCK_PTR
626 && e
->flags
& EDGE_FALLTHRU
)
631 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
633 bb_note (basic_block bb
)
639 note
= NEXT_INSN (note
);
641 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note
));
645 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
646 note associated with the BLOCK. */
649 first_insn_after_basic_block_note (basic_block block
)
653 /* Get the first instruction in the block. */
654 insn
= BB_HEAD (block
);
656 if (insn
== NULL_RTX
)
659 insn
= NEXT_INSN (insn
);
660 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
662 return NEXT_INSN (insn
);
665 /* Creates a new basic block just after basic block B by splitting
666 everything after specified instruction I. */
669 rtl_split_block (basic_block bb
, void *insnp
)
672 rtx insn
= (rtx
) insnp
;
678 insn
= first_insn_after_basic_block_note (bb
);
684 insn
= PREV_INSN (insn
);
686 /* If the block contains only debug insns, insn would have
687 been NULL in a non-debug compilation, and then we'd end
688 up emitting a DELETED note. For -fcompare-debug
689 stability, emit the note too. */
690 if (insn
!= BB_END (bb
)
691 && DEBUG_INSN_P (next
)
692 && DEBUG_INSN_P (BB_END (bb
)))
694 while (next
!= BB_END (bb
) && DEBUG_INSN_P (next
))
695 next
= NEXT_INSN (next
);
697 if (next
== BB_END (bb
))
698 emit_note_after (NOTE_INSN_DELETED
, next
);
702 insn
= get_last_insn ();
705 /* We probably should check type of the insn so that we do not create
706 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
708 if (insn
== BB_END (bb
))
709 emit_note_after (NOTE_INSN_DELETED
, insn
);
711 /* Create the new basic block. */
712 new_bb
= create_basic_block (NEXT_INSN (insn
), BB_END (bb
), bb
);
713 BB_COPY_PARTITION (new_bb
, bb
);
716 /* Redirect the outgoing edges. */
717 new_bb
->succs
= bb
->succs
;
719 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
722 /* The new block starts off being dirty. */
723 df_set_bb_dirty (bb
);
727 /* Return true if the single edge between blocks A and B is the only place
728 in RTL which holds some unique locus. */
731 unique_locus_on_edge_between_p (basic_block a
, basic_block b
)
733 const location_t goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
736 if (LOCATION_LOCUS (goto_locus
) == UNKNOWN_LOCATION
)
739 /* First scan block A backward. */
741 end
= PREV_INSN (BB_HEAD (a
));
742 while (insn
!= end
&& (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
743 insn
= PREV_INSN (insn
);
745 if (insn
!= end
&& INSN_LOCATION (insn
) == goto_locus
)
748 /* Then scan block B forward. */
752 end
= NEXT_INSN (BB_END (b
));
753 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
754 insn
= NEXT_INSN (insn
);
756 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
757 && INSN_LOCATION (insn
) == goto_locus
)
764 /* If the single edge between blocks A and B is the only place in RTL which
765 holds some unique locus, emit a nop with that locus between the blocks. */
768 emit_nop_for_unique_locus_between (basic_block a
, basic_block b
)
770 if (!unique_locus_on_edge_between_p (a
, b
))
773 BB_END (a
) = emit_insn_after_noloc (gen_nop (), BB_END (a
), a
);
774 INSN_LOCATION (BB_END (a
)) = EDGE_SUCC (a
, 0)->goto_locus
;
777 /* Blocks A and B are to be merged into a single block A. The insns
778 are already contiguous. */
781 rtl_merge_blocks (basic_block a
, basic_block b
)
783 rtx b_head
= BB_HEAD (b
), b_end
= BB_END (b
), a_end
= BB_END (a
);
784 rtx del_first
= NULL_RTX
, del_last
= NULL_RTX
;
785 rtx b_debug_start
= b_end
, b_debug_end
= b_end
;
786 bool forwarder_p
= (b
->flags
& BB_FORWARDER_BLOCK
) != 0;
790 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
793 while (DEBUG_INSN_P (b_end
))
794 b_end
= PREV_INSN (b_debug_start
= b_end
);
796 /* If there was a CODE_LABEL beginning B, delete it. */
797 if (LABEL_P (b_head
))
799 /* Detect basic blocks with nothing but a label. This can happen
800 in particular at the end of a function. */
804 del_first
= del_last
= b_head
;
805 b_head
= NEXT_INSN (b_head
);
808 /* Delete the basic block note and handle blocks containing just that
810 if (NOTE_INSN_BASIC_BLOCK_P (b_head
))
818 b_head
= NEXT_INSN (b_head
);
821 /* If there was a jump out of A, delete it. */
826 for (prev
= PREV_INSN (a_end
); ; prev
= PREV_INSN (prev
))
828 || NOTE_INSN_BASIC_BLOCK_P (prev
)
829 || prev
== BB_HEAD (a
))
835 /* If this was a conditional jump, we need to also delete
836 the insn that set cc0. */
837 if (only_sets_cc0_p (prev
))
841 prev
= prev_nonnote_insn (prev
);
848 a_end
= PREV_INSN (del_first
);
850 else if (BARRIER_P (NEXT_INSN (a_end
)))
851 del_first
= NEXT_INSN (a_end
);
853 /* Delete everything marked above as well as crap that might be
854 hanging out between the two blocks. */
856 BB_HEAD (b
) = b_empty
? NULL_RTX
: b_head
;
857 delete_insn_chain (del_first
, del_last
, true);
859 /* When not optimizing CFG and the edge is the only place in RTL which holds
860 some unique locus, emit a nop with that locus in between. */
863 emit_nop_for_unique_locus_between (a
, b
);
867 /* Reassociate the insns of B with A. */
870 update_bb_for_insn_chain (a_end
, b_debug_end
, a
);
872 BB_END (a
) = b_debug_end
;
873 BB_HEAD (b
) = NULL_RTX
;
875 else if (b_end
!= b_debug_end
)
877 /* Move any deleted labels and other notes between the end of A
878 and the debug insns that make up B after the debug insns,
879 bringing the debug insns into A while keeping the notes after
881 if (NEXT_INSN (a_end
) != b_debug_start
)
882 reorder_insns_nobb (NEXT_INSN (a_end
), PREV_INSN (b_debug_start
),
884 update_bb_for_insn_chain (b_debug_start
, b_debug_end
, a
);
885 BB_END (a
) = b_debug_end
;
888 df_bb_delete (b
->index
);
890 /* If B was a forwarder block, propagate the locus on the edge. */
892 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
)
893 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
896 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
900 /* Return true when block A and B can be merged. */
903 rtl_can_merge_blocks (basic_block a
, basic_block b
)
905 /* If we are partitioning hot/cold basic blocks, we don't want to
906 mess up unconditional or indirect jumps that cross between hot
909 Basic block partitioning may result in some jumps that appear to
910 be optimizable (or blocks that appear to be mergeable), but which really
911 must be left untouched (they are required to make it safely across
912 partition boundaries). See the comments at the top of
913 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
915 if (BB_PARTITION (a
) != BB_PARTITION (b
))
918 /* Protect the loop latches. */
919 if (current_loops
&& b
->loop_father
->latch
== b
)
922 /* There must be exactly one edge in between the blocks. */
923 return (single_succ_p (a
)
924 && single_succ (a
) == b
927 /* Must be simple edge. */
928 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
930 && a
!= ENTRY_BLOCK_PTR
&& b
!= EXIT_BLOCK_PTR
931 /* If the jump insn has side effects,
932 we can't kill the edge. */
933 && (!JUMP_P (BB_END (a
))
935 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
938 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
942 block_label (basic_block block
)
944 if (block
== EXIT_BLOCK_PTR
)
947 if (!LABEL_P (BB_HEAD (block
)))
949 BB_HEAD (block
) = emit_label_before (gen_label_rtx (), BB_HEAD (block
));
952 return BB_HEAD (block
);
955 /* Attempt to perform edge redirection by replacing possibly complex jump
956 instruction by unconditional jump or removing jump completely. This can
957 apply only if all edges now point to the same block. The parameters and
958 return values are equivalent to redirect_edge_and_branch. */
961 try_redirect_by_replacing_jump (edge e
, basic_block target
, bool in_cfglayout
)
963 basic_block src
= e
->src
;
964 rtx insn
= BB_END (src
), kill_from
;
968 /* If we are partitioning hot/cold basic blocks, we don't want to
969 mess up unconditional or indirect jumps that cross between hot
972 Basic block partitioning may result in some jumps that appear to
973 be optimizable (or blocks that appear to be mergeable), but which really
974 must be left untouched (they are required to make it safely across
975 partition boundaries). See the comments at the top of
976 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
978 if (find_reg_note (insn
, REG_CROSSING_JUMP
, NULL_RTX
)
979 || BB_PARTITION (src
) != BB_PARTITION (target
))
982 /* We can replace or remove a complex jump only when we have exactly
983 two edges. Also, if we have exactly one outgoing edge, we can
985 if (EDGE_COUNT (src
->succs
) >= 3
986 /* Verify that all targets will be TARGET. Specifically, the
987 edge that is not E must also go to TARGET. */
988 || (EDGE_COUNT (src
->succs
) == 2
989 && EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
))
992 if (!onlyjump_p (insn
))
994 if ((!optimize
|| reload_completed
) && tablejump_p (insn
, NULL
, NULL
))
997 /* Avoid removing branch with side effects. */
998 set
= single_set (insn
);
999 if (!set
|| side_effects_p (set
))
1002 /* In case we zap a conditional jump, we'll need to kill
1003 the cc0 setter too. */
1006 if (reg_mentioned_p (cc0_rtx
, PATTERN (insn
))
1007 && only_sets_cc0_p (PREV_INSN (insn
)))
1008 kill_from
= PREV_INSN (insn
);
1011 /* See if we can create the fallthru edge. */
1012 if (in_cfglayout
|| can_fallthru (src
, target
))
1015 fprintf (dump_file
, "Removing jump %i.\n", INSN_UID (insn
));
1018 /* Selectively unlink whole insn chain. */
1021 rtx insn
= BB_FOOTER (src
);
1023 delete_insn_chain (kill_from
, BB_END (src
), false);
1025 /* Remove barriers but keep jumptables. */
1028 if (BARRIER_P (insn
))
1030 if (PREV_INSN (insn
))
1031 NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
1033 BB_FOOTER (src
) = NEXT_INSN (insn
);
1034 if (NEXT_INSN (insn
))
1035 PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
1039 insn
= NEXT_INSN (insn
);
1043 delete_insn_chain (kill_from
, PREV_INSN (BB_HEAD (target
)),
1047 /* If this already is simplejump, redirect it. */
1048 else if (simplejump_p (insn
))
1050 if (e
->dest
== target
)
1053 fprintf (dump_file
, "Redirecting jump %i from %i to %i.\n",
1054 INSN_UID (insn
), e
->dest
->index
, target
->index
);
1055 if (!redirect_jump (insn
, block_label (target
), 0))
1057 gcc_assert (target
== EXIT_BLOCK_PTR
);
1062 /* Cannot do anything for target exit block. */
1063 else if (target
== EXIT_BLOCK_PTR
)
1066 /* Or replace possibly complicated jump insn by simple jump insn. */
1069 rtx target_label
= block_label (target
);
1070 rtx barrier
, label
, table
;
1072 emit_jump_insn_after_noloc (gen_jump (target_label
), insn
);
1073 JUMP_LABEL (BB_END (src
)) = target_label
;
1074 LABEL_NUSES (target_label
)++;
1076 fprintf (dump_file
, "Replacing insn %i by jump %i\n",
1077 INSN_UID (insn
), INSN_UID (BB_END (src
)));
1080 delete_insn_chain (kill_from
, insn
, false);
1082 /* Recognize a tablejump that we are converting to a
1083 simple jump and remove its associated CODE_LABEL
1084 and ADDR_VEC or ADDR_DIFF_VEC. */
1085 if (tablejump_p (insn
, &label
, &table
))
1086 delete_insn_chain (label
, table
, false);
1088 barrier
= next_nonnote_insn (BB_END (src
));
1089 if (!barrier
|| !BARRIER_P (barrier
))
1090 emit_barrier_after (BB_END (src
));
1093 if (barrier
!= NEXT_INSN (BB_END (src
)))
1095 /* Move the jump before barrier so that the notes
1096 which originally were or were created before jump table are
1097 inside the basic block. */
1098 rtx new_insn
= BB_END (src
);
1100 update_bb_for_insn_chain (NEXT_INSN (BB_END (src
)),
1101 PREV_INSN (barrier
), src
);
1103 NEXT_INSN (PREV_INSN (new_insn
)) = NEXT_INSN (new_insn
);
1104 PREV_INSN (NEXT_INSN (new_insn
)) = PREV_INSN (new_insn
);
1106 NEXT_INSN (new_insn
) = barrier
;
1107 NEXT_INSN (PREV_INSN (barrier
)) = new_insn
;
1109 PREV_INSN (new_insn
) = PREV_INSN (barrier
);
1110 PREV_INSN (barrier
) = new_insn
;
1115 /* Keep only one edge out and set proper flags. */
1116 if (!single_succ_p (src
))
1118 gcc_assert (single_succ_p (src
));
1120 e
= single_succ_edge (src
);
1122 e
->flags
= EDGE_FALLTHRU
;
1126 e
->probability
= REG_BR_PROB_BASE
;
1127 e
->count
= src
->count
;
1129 if (e
->dest
!= target
)
1130 redirect_edge_succ (e
, target
);
1134 /* Subroutine of redirect_branch_edge that tries to patch the jump
1135 instruction INSN so that it reaches block NEW. Do this
1136 only when it originally reached block OLD. Return true if this
1137 worked or the original target wasn't OLD, return false if redirection
1141 patch_jump_insn (rtx insn
, rtx old_label
, basic_block new_bb
)
1144 /* Recognize a tablejump and adjust all matching cases. */
1145 if (tablejump_p (insn
, NULL
, &tmp
))
1149 rtx new_label
= block_label (new_bb
);
1151 if (new_bb
== EXIT_BLOCK_PTR
)
1153 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
1154 vec
= XVEC (PATTERN (tmp
), 0);
1156 vec
= XVEC (PATTERN (tmp
), 1);
1158 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1159 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1161 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (Pmode
, new_label
);
1162 --LABEL_NUSES (old_label
);
1163 ++LABEL_NUSES (new_label
);
1166 /* Handle casesi dispatch insns. */
1167 if ((tmp
= single_set (insn
)) != NULL
1168 && SET_DEST (tmp
) == pc_rtx
1169 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1170 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
1171 && XEXP (XEXP (SET_SRC (tmp
), 2), 0) == old_label
)
1173 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (Pmode
,
1175 --LABEL_NUSES (old_label
);
1176 ++LABEL_NUSES (new_label
);
1179 else if ((tmp
= extract_asm_operands (PATTERN (insn
))) != NULL
)
1181 int i
, n
= ASM_OPERANDS_LABEL_LENGTH (tmp
);
1182 rtx new_label
, note
;
1184 if (new_bb
== EXIT_BLOCK_PTR
)
1186 new_label
= block_label (new_bb
);
1188 for (i
= 0; i
< n
; ++i
)
1190 rtx old_ref
= ASM_OPERANDS_LABEL (tmp
, i
);
1191 gcc_assert (GET_CODE (old_ref
) == LABEL_REF
);
1192 if (XEXP (old_ref
, 0) == old_label
)
1194 ASM_OPERANDS_LABEL (tmp
, i
)
1195 = gen_rtx_LABEL_REF (Pmode
, new_label
);
1196 --LABEL_NUSES (old_label
);
1197 ++LABEL_NUSES (new_label
);
1201 if (JUMP_LABEL (insn
) == old_label
)
1203 JUMP_LABEL (insn
) = new_label
;
1204 note
= find_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1206 remove_note (insn
, note
);
1210 note
= find_reg_note (insn
, REG_LABEL_TARGET
, old_label
);
1212 remove_note (insn
, note
);
1213 if (JUMP_LABEL (insn
) != new_label
1214 && !find_reg_note (insn
, REG_LABEL_TARGET
, new_label
))
1215 add_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1217 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, old_label
))
1219 XEXP (note
, 0) = new_label
;
1223 /* ?? We may play the games with moving the named labels from
1224 one basic block to the other in case only one computed_jump is
1226 if (computed_jump_p (insn
)
1227 /* A return instruction can't be redirected. */
1228 || returnjump_p (insn
))
1231 if (!currently_expanding_to_rtl
|| JUMP_LABEL (insn
) == old_label
)
1233 /* If the insn doesn't go where we think, we're confused. */
1234 gcc_assert (JUMP_LABEL (insn
) == old_label
);
1236 /* If the substitution doesn't succeed, die. This can happen
1237 if the back end emitted unrecognizable instructions or if
1238 target is exit block on some arches. */
1239 if (!redirect_jump (insn
, block_label (new_bb
), 0))
1241 gcc_assert (new_bb
== EXIT_BLOCK_PTR
);
1250 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1253 redirect_branch_edge (edge e
, basic_block target
)
1255 rtx old_label
= BB_HEAD (e
->dest
);
1256 basic_block src
= e
->src
;
1257 rtx insn
= BB_END (src
);
1259 /* We can only redirect non-fallthru edges of jump insn. */
1260 if (e
->flags
& EDGE_FALLTHRU
)
1262 else if (!JUMP_P (insn
) && !currently_expanding_to_rtl
)
1265 if (!currently_expanding_to_rtl
)
1267 if (!patch_jump_insn (insn
, old_label
, target
))
1271 /* When expanding this BB might actually contain multiple
1272 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1273 Redirect all of those that match our label. */
1274 FOR_BB_INSNS (src
, insn
)
1275 if (JUMP_P (insn
) && !patch_jump_insn (insn
, old_label
, target
))
1279 fprintf (dump_file
, "Edge %i->%i redirected to %i\n",
1280 e
->src
->index
, e
->dest
->index
, target
->index
);
1282 if (e
->dest
!= target
)
1283 e
= redirect_edge_succ_nodup (e
, target
);
1288 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1289 expense of adding new instructions or reordering basic blocks.
1291 Function can be also called with edge destination equivalent to the TARGET.
1292 Then it should try the simplifications and do nothing if none is possible.
1294 Return edge representing the branch if transformation succeeded. Return NULL
1296 We still return NULL in case E already destinated TARGET and we didn't
1297 managed to simplify instruction stream. */
1300 rtl_redirect_edge_and_branch (edge e
, basic_block target
)
1303 basic_block src
= e
->src
;
1305 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
1308 if (e
->dest
== target
)
1311 if ((ret
= try_redirect_by_replacing_jump (e
, target
, false)) != NULL
)
1313 df_set_bb_dirty (src
);
1317 ret
= redirect_branch_edge (e
, target
);
1321 df_set_bb_dirty (src
);
1325 /* Like force_nonfallthru below, but additionally performs redirection
1326 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1327 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1328 simple_return_rtx, indicating which kind of returnjump to create.
1329 It should be NULL otherwise. */
1332 force_nonfallthru_and_redirect (edge e
, basic_block target
, rtx jump_label
)
1334 basic_block jump_block
, new_bb
= NULL
, src
= e
->src
;
1337 int abnormal_edge_flags
= 0;
1338 bool asm_goto_edge
= false;
1341 /* In the case the last instruction is conditional jump to the next
1342 instruction, first redirect the jump itself and then continue
1343 by creating a basic block afterwards to redirect fallthru edge. */
1344 if (e
->src
!= ENTRY_BLOCK_PTR
&& e
->dest
!= EXIT_BLOCK_PTR
1345 && any_condjump_p (BB_END (e
->src
))
1346 && JUMP_LABEL (BB_END (e
->src
)) == BB_HEAD (e
->dest
))
1349 edge b
= unchecked_make_edge (e
->src
, target
, 0);
1352 redirected
= redirect_jump (BB_END (e
->src
), block_label (target
), 0);
1353 gcc_assert (redirected
);
1355 note
= find_reg_note (BB_END (e
->src
), REG_BR_PROB
, NULL_RTX
);
1358 int prob
= INTVAL (XEXP (note
, 0));
1360 b
->probability
= prob
;
1361 b
->count
= e
->count
* prob
/ REG_BR_PROB_BASE
;
1362 e
->probability
-= e
->probability
;
1363 e
->count
-= b
->count
;
1364 if (e
->probability
< 0)
1371 if (e
->flags
& EDGE_ABNORMAL
)
1373 /* Irritating special case - fallthru edge to the same block as abnormal
1375 We can't redirect abnormal edge, but we still can split the fallthru
1376 one and create separate abnormal edge to original destination.
1377 This allows bb-reorder to make such edge non-fallthru. */
1378 gcc_assert (e
->dest
== target
);
1379 abnormal_edge_flags
= e
->flags
& ~EDGE_FALLTHRU
;
1380 e
->flags
&= EDGE_FALLTHRU
;
1384 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
1385 if (e
->src
== ENTRY_BLOCK_PTR
)
1387 /* We can't redirect the entry block. Create an empty block
1388 at the start of the function which we use to add the new
1394 basic_block bb
= create_basic_block (BB_HEAD (e
->dest
), NULL
, ENTRY_BLOCK_PTR
);
1396 /* Change the existing edge's source to be the new block, and add
1397 a new edge from the entry block to the new block. */
1399 for (ei
= ei_start (ENTRY_BLOCK_PTR
->succs
); (tmp
= ei_safe_edge (ei
)); )
1403 ENTRY_BLOCK_PTR
->succs
->unordered_remove (ei
.index
);
1413 vec_safe_push (bb
->succs
, e
);
1414 make_single_succ_edge (ENTRY_BLOCK_PTR
, bb
, EDGE_FALLTHRU
);
1418 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1419 don't point to the target or fallthru label. */
1420 if (JUMP_P (BB_END (e
->src
))
1421 && target
!= EXIT_BLOCK_PTR
1422 && (e
->flags
& EDGE_FALLTHRU
)
1423 && (note
= extract_asm_operands (PATTERN (BB_END (e
->src
)))))
1425 int i
, n
= ASM_OPERANDS_LABEL_LENGTH (note
);
1426 bool adjust_jump_target
= false;
1428 for (i
= 0; i
< n
; ++i
)
1430 if (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) == BB_HEAD (e
->dest
))
1432 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0))--;
1433 XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) = block_label (target
);
1434 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0))++;
1435 adjust_jump_target
= true;
1437 if (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) == BB_HEAD (target
))
1438 asm_goto_edge
= true;
1440 if (adjust_jump_target
)
1442 rtx insn
= BB_END (e
->src
), note
;
1443 rtx old_label
= BB_HEAD (e
->dest
);
1444 rtx new_label
= BB_HEAD (target
);
1446 if (JUMP_LABEL (insn
) == old_label
)
1448 JUMP_LABEL (insn
) = new_label
;
1449 note
= find_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1451 remove_note (insn
, note
);
1455 note
= find_reg_note (insn
, REG_LABEL_TARGET
, old_label
);
1457 remove_note (insn
, note
);
1458 if (JUMP_LABEL (insn
) != new_label
1459 && !find_reg_note (insn
, REG_LABEL_TARGET
, new_label
))
1460 add_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1462 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, old_label
))
1464 XEXP (note
, 0) = new_label
;
1468 if (EDGE_COUNT (e
->src
->succs
) >= 2 || abnormal_edge_flags
|| asm_goto_edge
)
1470 gcov_type count
= e
->count
;
1471 int probability
= e
->probability
;
1472 /* Create the new structures. */
1474 /* If the old block ended with a tablejump, skip its table
1475 by searching forward from there. Otherwise start searching
1476 forward from the last instruction of the old block. */
1477 if (!tablejump_p (BB_END (e
->src
), NULL
, ¬e
))
1478 note
= BB_END (e
->src
);
1479 note
= NEXT_INSN (note
);
1481 jump_block
= create_basic_block (note
, NULL
, e
->src
);
1482 jump_block
->count
= count
;
1483 jump_block
->frequency
= EDGE_FREQUENCY (e
);
1485 /* Make sure new block ends up in correct hot/cold section. */
1487 BB_COPY_PARTITION (jump_block
, e
->src
);
1488 if (flag_reorder_blocks_and_partition
1489 && targetm_common
.have_named_sections
1490 && JUMP_P (BB_END (jump_block
))
1491 && !any_condjump_p (BB_END (jump_block
))
1492 && (EDGE_SUCC (jump_block
, 0)->flags
& EDGE_CROSSING
))
1493 add_reg_note (BB_END (jump_block
), REG_CROSSING_JUMP
, NULL_RTX
);
1496 new_edge
= make_edge (e
->src
, jump_block
, EDGE_FALLTHRU
);
1497 new_edge
->probability
= probability
;
1498 new_edge
->count
= count
;
1500 /* Redirect old edge. */
1501 redirect_edge_pred (e
, jump_block
);
1502 e
->probability
= REG_BR_PROB_BASE
;
1504 /* If asm goto has any label refs to target's label,
1505 add also edge from asm goto bb to target. */
1508 new_edge
->probability
/= 2;
1509 new_edge
->count
/= 2;
1510 jump_block
->count
/= 2;
1511 jump_block
->frequency
/= 2;
1512 new_edge
= make_edge (new_edge
->src
, target
,
1513 e
->flags
& ~EDGE_FALLTHRU
);
1514 new_edge
->probability
= probability
- probability
/ 2;
1515 new_edge
->count
= count
- count
/ 2;
1518 new_bb
= jump_block
;
1521 jump_block
= e
->src
;
1523 loc
= e
->goto_locus
;
1524 e
->flags
&= ~EDGE_FALLTHRU
;
1525 if (target
== EXIT_BLOCK_PTR
)
1527 if (jump_label
== ret_rtx
)
1530 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block
), loc
);
1537 gcc_assert (jump_label
== simple_return_rtx
);
1538 #ifdef HAVE_simple_return
1539 emit_jump_insn_after_setloc (gen_simple_return (),
1540 BB_END (jump_block
), loc
);
1545 set_return_jump_label (BB_END (jump_block
));
1549 rtx label
= block_label (target
);
1550 emit_jump_insn_after_setloc (gen_jump (label
), BB_END (jump_block
), loc
);
1551 JUMP_LABEL (BB_END (jump_block
)) = label
;
1552 LABEL_NUSES (label
)++;
1555 emit_barrier_after (BB_END (jump_block
));
1556 redirect_edge_succ_nodup (e
, target
);
1558 if (abnormal_edge_flags
)
1559 make_edge (src
, target
, abnormal_edge_flags
);
1561 df_mark_solutions_dirty ();
1565 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1566 (and possibly create new basic block) to make edge non-fallthru.
1567 Return newly created BB or NULL if none. */
1570 rtl_force_nonfallthru (edge e
)
1572 return force_nonfallthru_and_redirect (e
, e
->dest
, NULL_RTX
);
1575 /* Redirect edge even at the expense of creating new jump insn or
1576 basic block. Return new basic block if created, NULL otherwise.
1577 Conversion must be possible. */
1580 rtl_redirect_edge_and_branch_force (edge e
, basic_block target
)
1582 if (redirect_edge_and_branch (e
, target
)
1583 || e
->dest
== target
)
1586 /* In case the edge redirection failed, try to force it to be non-fallthru
1587 and redirect newly created simplejump. */
1588 df_set_bb_dirty (e
->src
);
1589 return force_nonfallthru_and_redirect (e
, target
, NULL_RTX
);
1592 /* The given edge should potentially be a fallthru edge. If that is in
1593 fact true, delete the jump and barriers that are in the way. */
1596 rtl_tidy_fallthru_edge (edge e
)
1599 basic_block b
= e
->src
, c
= b
->next_bb
;
1601 /* ??? In a late-running flow pass, other folks may have deleted basic
1602 blocks by nopping out blocks, leaving multiple BARRIERs between here
1603 and the target label. They ought to be chastised and fixed.
1605 We can also wind up with a sequence of undeletable labels between
1606 one block and the next.
1608 So search through a sequence of barriers, labels, and notes for
1609 the head of block C and assert that we really do fall through. */
1611 for (q
= NEXT_INSN (BB_END (b
)); q
!= BB_HEAD (c
); q
= NEXT_INSN (q
))
1615 /* Remove what will soon cease being the jump insn from the source block.
1616 If block B consisted only of this single jump, turn it into a deleted
1621 && (any_uncondjump_p (q
)
1622 || single_succ_p (b
)))
1625 /* If this was a conditional jump, we need to also delete
1626 the insn that set cc0. */
1627 if (any_condjump_p (q
) && only_sets_cc0_p (PREV_INSN (q
)))
1634 /* Selectively unlink the sequence. */
1635 if (q
!= PREV_INSN (BB_HEAD (c
)))
1636 delete_insn_chain (NEXT_INSN (q
), PREV_INSN (BB_HEAD (c
)), false);
1638 e
->flags
|= EDGE_FALLTHRU
;
1641 /* Should move basic block BB after basic block AFTER. NIY. */
1644 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED
,
1645 basic_block after ATTRIBUTE_UNUSED
)
1650 /* Split a (typically critical) edge. Return the new block.
1651 The edge must not be abnormal.
1653 ??? The code generally expects to be called on critical edges.
1654 The case of a block ending in an unconditional jump to a
1655 block with multiple predecessors is not handled optimally. */
1658 rtl_split_edge (edge edge_in
)
1663 /* Abnormal edges cannot be split. */
1664 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
1666 /* We are going to place the new block in front of edge destination.
1667 Avoid existence of fallthru predecessors. */
1668 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1670 edge e
= find_fallthru_edge (edge_in
->dest
->preds
);
1673 force_nonfallthru (e
);
1676 /* Create the basic block note. */
1677 if (edge_in
->dest
!= EXIT_BLOCK_PTR
)
1678 before
= BB_HEAD (edge_in
->dest
);
1682 /* If this is a fall through edge to the exit block, the blocks might be
1683 not adjacent, and the right place is after the source. */
1684 if ((edge_in
->flags
& EDGE_FALLTHRU
) && edge_in
->dest
== EXIT_BLOCK_PTR
)
1686 before
= NEXT_INSN (BB_END (edge_in
->src
));
1687 bb
= create_basic_block (before
, NULL
, edge_in
->src
);
1688 BB_COPY_PARTITION (bb
, edge_in
->src
);
1692 bb
= create_basic_block (before
, NULL
, edge_in
->dest
->prev_bb
);
1693 /* ??? Why not edge_in->dest->prev_bb here? */
1694 BB_COPY_PARTITION (bb
, edge_in
->dest
);
1697 make_single_succ_edge (bb
, edge_in
->dest
, EDGE_FALLTHRU
);
1699 /* For non-fallthru edges, we must adjust the predecessor's
1700 jump instruction to target our new block. */
1701 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1703 edge redirected
= redirect_edge_and_branch (edge_in
, bb
);
1704 gcc_assert (redirected
);
1708 if (edge_in
->src
!= ENTRY_BLOCK_PTR
)
1710 /* For asm goto even splitting of fallthru edge might
1711 need insn patching, as other labels might point to the
1713 rtx last
= BB_END (edge_in
->src
);
1716 && edge_in
->dest
!= EXIT_BLOCK_PTR
1717 && extract_asm_operands (PATTERN (last
)) != NULL_RTX
1718 && patch_jump_insn (last
, before
, bb
))
1719 df_set_bb_dirty (edge_in
->src
);
1721 redirect_edge_succ (edge_in
, bb
);
1727 /* Queue instructions for insertion on an edge between two basic blocks.
1728 The new instructions and basic blocks (if any) will not appear in the
1729 CFG until commit_edge_insertions is called. */
1732 insert_insn_on_edge (rtx pattern
, edge e
)
1734 /* We cannot insert instructions on an abnormal critical edge.
1735 It will be easier to find the culprit if we die now. */
1736 gcc_assert (!((e
->flags
& EDGE_ABNORMAL
) && EDGE_CRITICAL_P (e
)));
1738 if (e
->insns
.r
== NULL_RTX
)
1741 push_to_sequence (e
->insns
.r
);
1743 emit_insn (pattern
);
1745 e
->insns
.r
= get_insns ();
1749 /* Update the CFG for the instructions queued on edge E. */
1752 commit_one_edge_insertion (edge e
)
1754 rtx before
= NULL_RTX
, after
= NULL_RTX
, insns
, tmp
, last
;
1757 /* Pull the insns off the edge now since the edge might go away. */
1759 e
->insns
.r
= NULL_RTX
;
1761 /* Figure out where to put these insns. If the destination has
1762 one predecessor, insert there. Except for the exit block. */
1763 if (single_pred_p (e
->dest
) && e
->dest
!= EXIT_BLOCK_PTR
)
1767 /* Get the location correct wrt a code label, and "nice" wrt
1768 a basic block note, and before everything else. */
1771 tmp
= NEXT_INSN (tmp
);
1772 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
1773 tmp
= NEXT_INSN (tmp
);
1774 if (tmp
== BB_HEAD (bb
))
1777 after
= PREV_INSN (tmp
);
1779 after
= get_last_insn ();
1782 /* If the source has one successor and the edge is not abnormal,
1783 insert there. Except for the entry block. */
1784 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1785 && single_succ_p (e
->src
)
1786 && e
->src
!= ENTRY_BLOCK_PTR
)
1790 /* It is possible to have a non-simple jump here. Consider a target
1791 where some forms of unconditional jumps clobber a register. This
1792 happens on the fr30 for example.
1794 We know this block has a single successor, so we can just emit
1795 the queued insns before the jump. */
1796 if (JUMP_P (BB_END (bb
)))
1797 before
= BB_END (bb
);
1800 /* We'd better be fallthru, or we've lost track of what's what. */
1801 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
1803 after
= BB_END (bb
);
1807 /* Otherwise we must split the edge. */
1810 bb
= split_edge (e
);
1811 after
= BB_END (bb
);
1813 if (flag_reorder_blocks_and_partition
1814 && targetm_common
.have_named_sections
1815 && e
->src
!= ENTRY_BLOCK_PTR
1816 && BB_PARTITION (e
->src
) == BB_COLD_PARTITION
1817 && !(e
->flags
& EDGE_CROSSING
)
1819 && !any_condjump_p (after
)
1820 && (single_succ_edge (bb
)->flags
& EDGE_CROSSING
))
1821 add_reg_note (after
, REG_CROSSING_JUMP
, NULL_RTX
);
1824 /* Now that we've found the spot, do the insertion. */
1827 emit_insn_before_noloc (insns
, before
, bb
);
1828 last
= prev_nonnote_insn (before
);
1831 last
= emit_insn_after_noloc (insns
, after
, bb
);
1833 if (returnjump_p (last
))
1835 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1836 This is not currently a problem because this only happens
1837 for the (single) epilogue, which already has a fallthru edge
1840 e
= single_succ_edge (bb
);
1841 gcc_assert (e
->dest
== EXIT_BLOCK_PTR
1842 && single_succ_p (bb
) && (e
->flags
& EDGE_FALLTHRU
));
1844 e
->flags
&= ~EDGE_FALLTHRU
;
1845 emit_barrier_after (last
);
1848 delete_insn (before
);
1851 gcc_assert (!JUMP_P (last
));
1854 /* Update the CFG for all queued instructions. */
1857 commit_edge_insertions (void)
1861 #ifdef ENABLE_CHECKING
1862 verify_flow_info ();
1865 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, next_bb
)
1870 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1872 commit_one_edge_insertion (e
);
1877 /* Print out RTL-specific basic block information (live information
1878 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1879 documented in dumpfile.h. */
1882 rtl_dump_bb (FILE *outf
, basic_block bb
, int indent
, int flags
)
1888 s_indent
= (char *) alloca ((size_t) indent
+ 1);
1889 memset (s_indent
, ' ', (size_t) indent
);
1890 s_indent
[indent
] = '\0';
1892 if (df
&& (flags
& TDF_DETAILS
))
1894 df_dump_top (bb
, outf
);
1898 if (bb
->index
!= ENTRY_BLOCK
&& bb
->index
!= EXIT_BLOCK
)
1899 for (insn
= BB_HEAD (bb
), last
= NEXT_INSN (BB_END (bb
)); insn
!= last
;
1900 insn
= NEXT_INSN (insn
))
1902 if (flags
& TDF_DETAILS
)
1903 df_dump_insn_top (insn
, outf
);
1904 if (! (flags
& TDF_SLIM
))
1905 print_rtl_single (outf
, insn
);
1907 dump_insn_slim (outf
, insn
);
1908 if (flags
& TDF_DETAILS
)
1909 df_dump_insn_bottom (insn
, outf
);
1912 if (df
&& (flags
& TDF_DETAILS
))
1914 df_dump_bottom (bb
, outf
);
1920 /* Like dump_function_to_file, but for RTL. Print out dataflow information
1921 for the start of each basic block. FLAGS are the TDF_* masks documented
1925 print_rtl_with_bb (FILE *outf
, const_rtx rtx_first
, int flags
)
1929 fprintf (outf
, "(nil)\n");
1932 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
1933 int max_uid
= get_max_uid ();
1934 basic_block
*start
= XCNEWVEC (basic_block
, max_uid
);
1935 basic_block
*end
= XCNEWVEC (basic_block
, max_uid
);
1936 enum bb_state
*in_bb_p
= XCNEWVEC (enum bb_state
, max_uid
);
1939 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1940 insns, but the CFG is not maintained so the basic block info
1941 is not reliable. Therefore it's omitted from the dumps. */
1942 if (! (cfun
->curr_properties
& PROP_cfg
))
1943 flags
&= ~TDF_BLOCKS
;
1946 df_dump_start (outf
);
1948 if (flags
& TDF_BLOCKS
)
1950 FOR_EACH_BB_REVERSE (bb
)
1954 start
[INSN_UID (BB_HEAD (bb
))] = bb
;
1955 end
[INSN_UID (BB_END (bb
))] = bb
;
1956 for (x
= BB_HEAD (bb
); x
!= NULL_RTX
; x
= NEXT_INSN (x
))
1958 enum bb_state state
= IN_MULTIPLE_BB
;
1960 if (in_bb_p
[INSN_UID (x
)] == NOT_IN_BB
)
1962 in_bb_p
[INSN_UID (x
)] = state
;
1964 if (x
== BB_END (bb
))
1970 for (tmp_rtx
= rtx_first
; NULL
!= tmp_rtx
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
1972 if (flags
& TDF_BLOCKS
)
1974 bb
= start
[INSN_UID (tmp_rtx
)];
1977 dump_bb_info (outf
, bb
, 0, dump_flags
| TDF_COMMENT
, true, false);
1978 if (df
&& (flags
& TDF_DETAILS
))
1979 df_dump_top (bb
, outf
);
1982 if (in_bb_p
[INSN_UID (tmp_rtx
)] == NOT_IN_BB
1983 && !NOTE_P (tmp_rtx
)
1984 && !BARRIER_P (tmp_rtx
))
1985 fprintf (outf
, ";; Insn is not within a basic block\n");
1986 else if (in_bb_p
[INSN_UID (tmp_rtx
)] == IN_MULTIPLE_BB
)
1987 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
1990 if (flags
& TDF_DETAILS
)
1991 df_dump_insn_top (tmp_rtx
, outf
);
1992 if (! (flags
& TDF_SLIM
))
1993 print_rtl_single (outf
, tmp_rtx
);
1995 dump_insn_slim (outf
, tmp_rtx
);
1996 if (flags
& TDF_DETAILS
)
1997 df_dump_insn_bottom (tmp_rtx
, outf
);
1999 if (flags
& TDF_BLOCKS
)
2001 bb
= end
[INSN_UID (tmp_rtx
)];
2004 dump_bb_info (outf
, bb
, 0, dump_flags
| TDF_COMMENT
, false, true);
2005 if (df
&& (flags
& TDF_DETAILS
))
2006 df_dump_bottom (bb
, outf
);
2018 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2021 update_br_prob_note (basic_block bb
)
2024 if (!JUMP_P (BB_END (bb
)))
2026 note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
);
2027 if (!note
|| INTVAL (XEXP (note
, 0)) == BRANCH_EDGE (bb
)->probability
)
2029 XEXP (note
, 0) = GEN_INT (BRANCH_EDGE (bb
)->probability
);
2032 /* Get the last insn associated with block BB (that includes barriers and
2033 tablejumps after BB). */
2035 get_last_bb_insn (basic_block bb
)
2038 rtx end
= BB_END (bb
);
2040 /* Include any jump table following the basic block. */
2041 if (tablejump_p (end
, NULL
, &tmp
))
2044 /* Include any barriers that may follow the basic block. */
2045 tmp
= next_nonnote_insn_bb (end
);
2046 while (tmp
&& BARRIER_P (tmp
))
2049 tmp
= next_nonnote_insn_bb (end
);
2055 /* Verify the CFG and RTL consistency common for both underlying RTL and
2058 Currently it does following checks:
2060 - overlapping of basic blocks
2061 - insns with wrong BLOCK_FOR_INSN pointers
2062 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2063 - tails of basic blocks (ensure that boundary is necessary)
2064 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2065 and NOTE_INSN_BASIC_BLOCK
2066 - verify that no fall_thru edge crosses hot/cold partition boundaries
2067 - verify that there are no pending RTL branch predictions
2069 In future it can be extended check a lot of other stuff as well
2070 (reachability of basic blocks, life information, etc. etc.). */
2073 rtl_verify_flow_info_1 (void)
2079 /* Check the general integrity of the basic blocks. */
2080 FOR_EACH_BB_REVERSE (bb
)
2084 if (!(bb
->flags
& BB_RTL
))
2086 error ("BB_RTL flag not set for block %d", bb
->index
);
2090 FOR_BB_INSNS (bb
, insn
)
2091 if (BLOCK_FOR_INSN (insn
) != bb
)
2093 error ("insn %d basic block pointer is %d, should be %d",
2095 BLOCK_FOR_INSN (insn
) ? BLOCK_FOR_INSN (insn
)->index
: 0,
2100 for (insn
= BB_HEADER (bb
); insn
; insn
= NEXT_INSN (insn
))
2101 if (!BARRIER_P (insn
)
2102 && BLOCK_FOR_INSN (insn
) != NULL
)
2104 error ("insn %d in header of bb %d has non-NULL basic block",
2105 INSN_UID (insn
), bb
->index
);
2108 for (insn
= BB_FOOTER (bb
); insn
; insn
= NEXT_INSN (insn
))
2109 if (!BARRIER_P (insn
)
2110 && BLOCK_FOR_INSN (insn
) != NULL
)
2112 error ("insn %d in footer of bb %d has non-NULL basic block",
2113 INSN_UID (insn
), bb
->index
);
2118 /* Now check the basic blocks (boundaries etc.) */
2119 FOR_EACH_BB_REVERSE (bb
)
2121 int n_fallthru
= 0, n_branch
= 0, n_abnormal_call
= 0, n_sibcall
= 0;
2122 int n_eh
= 0, n_abnormal
= 0;
2123 edge e
, fallthru
= NULL
;
2127 if (JUMP_P (BB_END (bb
))
2128 && (note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
))
2129 && EDGE_COUNT (bb
->succs
) >= 2
2130 && any_condjump_p (BB_END (bb
)))
2132 if (INTVAL (XEXP (note
, 0)) != BRANCH_EDGE (bb
)->probability
2133 && profile_status
!= PROFILE_ABSENT
)
2135 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2136 INTVAL (XEXP (note
, 0)), BRANCH_EDGE (bb
)->probability
);
2140 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2144 if (e
->flags
& EDGE_FALLTHRU
)
2145 n_fallthru
++, fallthru
= e
;
2147 is_crossing
= (BB_PARTITION (e
->src
) != BB_PARTITION (e
->dest
)
2148 && e
->src
!= ENTRY_BLOCK_PTR
2149 && e
->dest
!= EXIT_BLOCK_PTR
);
2150 if (e
->flags
& EDGE_CROSSING
)
2154 error ("EDGE_CROSSING incorrectly set across same section");
2157 if (e
->flags
& EDGE_FALLTHRU
)
2159 error ("fallthru edge crosses section boundary in bb %i",
2163 if (e
->flags
& EDGE_EH
)
2165 error ("EH edge crosses section boundary in bb %i",
2170 else if (is_crossing
)
2172 error ("EDGE_CROSSING missing across section boundary");
2176 if ((e
->flags
& ~(EDGE_DFS_BACK
2178 | EDGE_IRREDUCIBLE_LOOP
2181 | EDGE_PRESERVE
)) == 0)
2184 if (e
->flags
& EDGE_ABNORMAL_CALL
)
2187 if (e
->flags
& EDGE_SIBCALL
)
2190 if (e
->flags
& EDGE_EH
)
2193 if (e
->flags
& EDGE_ABNORMAL
)
2197 if (n_eh
&& !find_reg_note (BB_END (bb
), REG_EH_REGION
, NULL_RTX
))
2199 error ("missing REG_EH_REGION note at the end of bb %i", bb
->index
);
2204 error ("too many exception handling edges in bb %i", bb
->index
);
2208 && (!JUMP_P (BB_END (bb
))
2209 || (n_branch
> 1 && (any_uncondjump_p (BB_END (bb
))
2210 || any_condjump_p (BB_END (bb
))))))
2212 error ("too many outgoing branch edges from bb %i", bb
->index
);
2215 if (n_fallthru
&& any_uncondjump_p (BB_END (bb
)))
2217 error ("fallthru edge after unconditional jump in bb %i", bb
->index
);
2220 if (n_branch
!= 1 && any_uncondjump_p (BB_END (bb
)))
2222 error ("wrong number of branch edges after unconditional jump"
2223 " in bb %i", bb
->index
);
2226 if (n_branch
!= 1 && any_condjump_p (BB_END (bb
))
2227 && JUMP_LABEL (BB_END (bb
)) != BB_HEAD (fallthru
->dest
))
2229 error ("wrong amount of branch edges after conditional jump"
2230 " in bb %i", bb
->index
);
2233 if (n_abnormal_call
&& !CALL_P (BB_END (bb
)))
2235 error ("abnormal call edges for non-call insn in bb %i", bb
->index
);
2238 if (n_sibcall
&& !CALL_P (BB_END (bb
)))
2240 error ("sibcall edges for non-call insn in bb %i", bb
->index
);
2243 if (n_abnormal
> n_eh
2244 && !(CALL_P (BB_END (bb
))
2245 && n_abnormal
== n_abnormal_call
+ n_sibcall
)
2246 && (!JUMP_P (BB_END (bb
))
2247 || any_condjump_p (BB_END (bb
))
2248 || any_uncondjump_p (BB_END (bb
))))
2250 error ("abnormal edges for no purpose in bb %i", bb
->index
);
2254 for (x
= BB_HEAD (bb
); x
!= NEXT_INSN (BB_END (bb
)); x
= NEXT_INSN (x
))
2255 /* We may have a barrier inside a basic block before dead code
2256 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2257 if (!BARRIER_P (x
) && BLOCK_FOR_INSN (x
) != bb
)
2260 if (! BLOCK_FOR_INSN (x
))
2262 ("insn %d inside basic block %d but block_for_insn is NULL",
2263 INSN_UID (x
), bb
->index
);
2266 ("insn %d inside basic block %d but block_for_insn is %i",
2267 INSN_UID (x
), bb
->index
, BLOCK_FOR_INSN (x
)->index
);
2272 /* OK pointers are correct. Now check the header of basic
2273 block. It ought to contain optional CODE_LABEL followed
2274 by NOTE_BASIC_BLOCK. */
2278 if (BB_END (bb
) == x
)
2280 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2288 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
2290 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2295 if (BB_END (bb
) == x
)
2296 /* Do checks for empty blocks here. */
2299 for (x
= NEXT_INSN (x
); x
; x
= NEXT_INSN (x
))
2301 if (NOTE_INSN_BASIC_BLOCK_P (x
))
2303 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2304 INSN_UID (x
), bb
->index
);
2308 if (x
== BB_END (bb
))
2311 if (control_flow_insn_p (x
))
2313 error ("in basic block %d:", bb
->index
);
2314 fatal_insn ("flow control insn inside a basic block", x
);
2323 /* Verify the CFG and RTL consistency common for both underlying RTL and
2326 Currently it does following checks:
2327 - all checks of rtl_verify_flow_info_1
2328 - test head/end pointers
2329 - check that all insns are in the basic blocks
2330 (except the switch handling code, barriers and notes)
2331 - check that all returns are followed by barriers
2332 - check that all fallthru edge points to the adjacent blocks. */
2335 rtl_verify_flow_info (void)
2338 int err
= rtl_verify_flow_info_1 ();
2340 rtx last_head
= get_last_insn ();
2341 basic_block
*bb_info
;
2343 const rtx rtx_first
= get_insns ();
2344 basic_block last_bb_seen
= ENTRY_BLOCK_PTR
, curr_bb
= NULL
;
2345 const int max_uid
= get_max_uid ();
2347 bb_info
= XCNEWVEC (basic_block
, max_uid
);
2349 FOR_EACH_BB_REVERSE (bb
)
2352 rtx head
= BB_HEAD (bb
);
2353 rtx end
= BB_END (bb
);
2355 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2357 /* Verify the end of the basic block is in the INSN chain. */
2361 /* And that the code outside of basic blocks has NULL bb field. */
2363 && BLOCK_FOR_INSN (x
) != NULL
)
2365 error ("insn %d outside of basic blocks has non-NULL bb field",
2373 error ("end insn %d for block %d not found in the insn stream",
2374 INSN_UID (end
), bb
->index
);
2378 /* Work backwards from the end to the head of the basic block
2379 to verify the head is in the RTL chain. */
2380 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2382 /* While walking over the insn chain, verify insns appear
2383 in only one basic block. */
2384 if (bb_info
[INSN_UID (x
)] != NULL
)
2386 error ("insn %d is in multiple basic blocks (%d and %d)",
2387 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
2391 bb_info
[INSN_UID (x
)] = bb
;
2398 error ("head insn %d for block %d not found in the insn stream",
2399 INSN_UID (head
), bb
->index
);
2403 last_head
= PREV_INSN (x
);
2405 e
= find_fallthru_edge (bb
->succs
);
2410 /* Ensure existence of barrier in BB with no fallthru edges. */
2411 for (insn
= NEXT_INSN (BB_END (bb
)); ; insn
= NEXT_INSN (insn
))
2413 if (!insn
|| NOTE_INSN_BASIC_BLOCK_P (insn
))
2415 error ("missing barrier after block %i", bb
->index
);
2419 if (BARRIER_P (insn
))
2423 else if (e
->src
!= ENTRY_BLOCK_PTR
2424 && e
->dest
!= EXIT_BLOCK_PTR
)
2428 if (e
->src
->next_bb
!= e
->dest
)
2431 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2432 e
->src
->index
, e
->dest
->index
);
2436 for (insn
= NEXT_INSN (BB_END (e
->src
)); insn
!= BB_HEAD (e
->dest
);
2437 insn
= NEXT_INSN (insn
))
2438 if (BARRIER_P (insn
) || INSN_P (insn
))
2440 error ("verify_flow_info: Incorrect fallthru %i->%i",
2441 e
->src
->index
, e
->dest
->index
);
2442 fatal_insn ("wrong insn in the fallthru edge", insn
);
2448 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2450 /* Check that the code before the first basic block has NULL
2453 && BLOCK_FOR_INSN (x
) != NULL
)
2455 error ("insn %d outside of basic blocks has non-NULL bb field",
2463 last_bb_seen
= ENTRY_BLOCK_PTR
;
2465 for (x
= rtx_first
; x
; x
= NEXT_INSN (x
))
2467 if (NOTE_INSN_BASIC_BLOCK_P (x
))
2469 bb
= NOTE_BASIC_BLOCK (x
);
2472 if (bb
!= last_bb_seen
->next_bb
)
2473 internal_error ("basic blocks not laid down consecutively");
2475 curr_bb
= last_bb_seen
= bb
;
2480 switch (GET_CODE (x
))
2487 /* An addr_vec is placed outside any basic block. */
2489 && JUMP_TABLE_DATA_P (NEXT_INSN (x
)))
2492 /* But in any case, non-deletable labels can appear anywhere. */
2496 fatal_insn ("insn outside basic block", x
);
2501 && returnjump_p (x
) && ! condjump_p (x
)
2502 && ! (next_nonnote_insn (x
) && BARRIER_P (next_nonnote_insn (x
))))
2503 fatal_insn ("return not followed by barrier", x
);
2504 if (curr_bb
&& x
== BB_END (curr_bb
))
2508 if (num_bb_notes
!= n_basic_blocks
- NUM_FIXED_BLOCKS
)
2510 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2511 num_bb_notes
, n_basic_blocks
);
2516 /* Assume that the preceding pass has possibly eliminated jump instructions
2517 or converted the unconditional jumps. Eliminate the edges from CFG.
2518 Return true if any edges are eliminated. */
2521 purge_dead_edges (basic_block bb
)
2524 rtx insn
= BB_END (bb
), note
;
2525 bool purged
= false;
2529 if (DEBUG_INSN_P (insn
) && insn
!= BB_HEAD (bb
))
2531 insn
= PREV_INSN (insn
);
2532 while ((DEBUG_INSN_P (insn
) || NOTE_P (insn
)) && insn
!= BB_HEAD (bb
));
2534 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2535 if (NONJUMP_INSN_P (insn
)
2536 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
)))
2540 if (! may_trap_p (PATTERN (insn
))
2541 || ((eqnote
= find_reg_equal_equiv_note (insn
))
2542 && ! may_trap_p (XEXP (eqnote
, 0))))
2543 remove_note (insn
, note
);
2546 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2547 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2549 bool remove
= false;
2551 /* There are three types of edges we need to handle correctly here: EH
2552 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2553 latter can appear when nonlocal gotos are used. */
2554 if (e
->flags
& EDGE_ABNORMAL_CALL
)
2558 else if (can_nonlocal_goto (insn
))
2560 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
2562 else if (flag_tm
&& find_reg_note (insn
, REG_TM
, NULL
))
2567 else if (e
->flags
& EDGE_EH
)
2568 remove
= !can_throw_internal (insn
);
2573 df_set_bb_dirty (bb
);
2586 /* We do care only about conditional jumps and simplejumps. */
2587 if (!any_condjump_p (insn
)
2588 && !returnjump_p (insn
)
2589 && !simplejump_p (insn
))
2592 /* Branch probability/prediction notes are defined only for
2593 condjumps. We've possibly turned condjump into simplejump. */
2594 if (simplejump_p (insn
))
2596 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
2598 remove_note (insn
, note
);
2599 while ((note
= find_reg_note (insn
, REG_BR_PRED
, NULL
)))
2600 remove_note (insn
, note
);
2603 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2605 /* Avoid abnormal flags to leak from computed jumps turned
2606 into simplejumps. */
2608 e
->flags
&= ~EDGE_ABNORMAL
;
2610 /* See if this edge is one we should keep. */
2611 if ((e
->flags
& EDGE_FALLTHRU
) && any_condjump_p (insn
))
2612 /* A conditional jump can fall through into the next
2613 block, so we should keep the edge. */
2618 else if (e
->dest
!= EXIT_BLOCK_PTR
2619 && BB_HEAD (e
->dest
) == JUMP_LABEL (insn
))
2620 /* If the destination block is the target of the jump,
2626 else if (e
->dest
== EXIT_BLOCK_PTR
&& returnjump_p (insn
))
2627 /* If the destination block is the exit block, and this
2628 instruction is a return, then keep the edge. */
2633 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
2634 /* Keep the edges that correspond to exceptions thrown by
2635 this instruction and rematerialize the EDGE_ABNORMAL
2636 flag we just cleared above. */
2638 e
->flags
|= EDGE_ABNORMAL
;
2643 /* We do not need this edge. */
2644 df_set_bb_dirty (bb
);
2649 if (EDGE_COUNT (bb
->succs
) == 0 || !purged
)
2653 fprintf (dump_file
, "Purged edges from bb %i\n", bb
->index
);
2658 /* Redistribute probabilities. */
2659 if (single_succ_p (bb
))
2661 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
2662 single_succ_edge (bb
)->count
= bb
->count
;
2666 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
2670 b
= BRANCH_EDGE (bb
);
2671 f
= FALLTHRU_EDGE (bb
);
2672 b
->probability
= INTVAL (XEXP (note
, 0));
2673 f
->probability
= REG_BR_PROB_BASE
- b
->probability
;
2674 b
->count
= bb
->count
* b
->probability
/ REG_BR_PROB_BASE
;
2675 f
->count
= bb
->count
* f
->probability
/ REG_BR_PROB_BASE
;
2680 else if (CALL_P (insn
) && SIBLING_CALL_P (insn
))
2682 /* First, there should not be any EH or ABCALL edges resulting
2683 from non-local gotos and the like. If there were, we shouldn't
2684 have created the sibcall in the first place. Second, there
2685 should of course never have been a fallthru edge. */
2686 gcc_assert (single_succ_p (bb
));
2687 gcc_assert (single_succ_edge (bb
)->flags
2688 == (EDGE_SIBCALL
| EDGE_ABNORMAL
));
2693 /* If we don't see a jump insn, we don't know exactly why the block would
2694 have been broken at this point. Look for a simple, non-fallthru edge,
2695 as these are only created by conditional branches. If we find such an
2696 edge we know that there used to be a jump here and can then safely
2697 remove all non-fallthru edges. */
2699 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2700 if (! (e
->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
)))
2709 /* Remove all but the fake and fallthru edges. The fake edge may be
2710 the only successor for this block in the case of noreturn
2712 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2714 if (!(e
->flags
& (EDGE_FALLTHRU
| EDGE_FAKE
)))
2716 df_set_bb_dirty (bb
);
2724 gcc_assert (single_succ_p (bb
));
2726 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
2727 single_succ_edge (bb
)->count
= bb
->count
;
2730 fprintf (dump_file
, "Purged non-fallthru edges from bb %i\n",
2735 /* Search all basic blocks for potentially dead edges and purge them. Return
2736 true if some edge has been eliminated. */
2739 purge_all_dead_edges (void)
2746 bool purged_here
= purge_dead_edges (bb
);
2748 purged
|= purged_here
;
2754 /* This is used by a few passes that emit some instructions after abnormal
2755 calls, moving the basic block's end, while they in fact do want to emit
2756 them on the fallthru edge. Look for abnormal call edges, find backward
2757 the call in the block and insert the instructions on the edge instead.
2759 Similarly, handle instructions throwing exceptions internally.
2761 Return true when instructions have been found and inserted on edges. */
2764 fixup_abnormal_edges (void)
2766 bool inserted
= false;
2774 /* Look for cases we are interested in - calls or instructions causing
2776 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2777 if ((e
->flags
& EDGE_ABNORMAL_CALL
)
2778 || ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
2779 == (EDGE_ABNORMAL
| EDGE_EH
)))
2782 if (e
&& !CALL_P (BB_END (bb
)) && !can_throw_internal (BB_END (bb
)))
2786 /* Get past the new insns generated. Allow notes, as the insns
2787 may be already deleted. */
2789 while ((NONJUMP_INSN_P (insn
) || NOTE_P (insn
))
2790 && !can_throw_internal (insn
)
2791 && insn
!= BB_HEAD (bb
))
2792 insn
= PREV_INSN (insn
);
2794 if (CALL_P (insn
) || can_throw_internal (insn
))
2798 e
= find_fallthru_edge (bb
->succs
);
2800 stop
= NEXT_INSN (BB_END (bb
));
2803 for (insn
= NEXT_INSN (insn
); insn
!= stop
; insn
= next
)
2805 next
= NEXT_INSN (insn
);
2810 /* Sometimes there's still the return value USE.
2811 If it's placed after a trapping call (i.e. that
2812 call is the last insn anyway), we have no fallthru
2813 edge. Simply delete this use and don't try to insert
2814 on the non-existent edge. */
2815 if (GET_CODE (PATTERN (insn
)) != USE
)
2817 /* We're not deleting it, we're moving it. */
2818 INSN_DELETED_P (insn
) = 0;
2819 PREV_INSN (insn
) = NULL_RTX
;
2820 NEXT_INSN (insn
) = NULL_RTX
;
2822 insert_insn_on_edge (insn
, e
);
2826 else if (!BARRIER_P (insn
))
2827 set_block_for_insn (insn
, NULL
);
2831 /* It may be that we don't find any trapping insn. In this
2832 case we discovered quite late that the insn that had been
2833 marked as can_throw_internal in fact couldn't trap at all.
2834 So we should in fact delete the EH edges out of the block. */
2836 purge_dead_edges (bb
);
2843 /* Cut the insns from FIRST to LAST out of the insns stream. */
2846 unlink_insn_chain (rtx first
, rtx last
)
2848 rtx prevfirst
= PREV_INSN (first
);
2849 rtx nextlast
= NEXT_INSN (last
);
2851 PREV_INSN (first
) = NULL
;
2852 NEXT_INSN (last
) = NULL
;
2854 NEXT_INSN (prevfirst
) = nextlast
;
2856 PREV_INSN (nextlast
) = prevfirst
;
2858 set_last_insn (prevfirst
);
2860 set_first_insn (nextlast
);
2864 /* Skip over inter-block insns occurring after BB which are typically
2865 associated with BB (e.g., barriers). If there are any such insns,
2866 we return the last one. Otherwise, we return the end of BB. */
2869 skip_insns_after_block (basic_block bb
)
2871 rtx insn
, last_insn
, next_head
, prev
;
2873 next_head
= NULL_RTX
;
2874 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2875 next_head
= BB_HEAD (bb
->next_bb
);
2877 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
2879 if (insn
== next_head
)
2882 switch (GET_CODE (insn
))
2889 switch (NOTE_KIND (insn
))
2891 case NOTE_INSN_BLOCK_END
:
2901 if (NEXT_INSN (insn
)
2902 && JUMP_TABLE_DATA_P (NEXT_INSN (insn
)))
2904 insn
= NEXT_INSN (insn
);
2917 /* It is possible to hit contradictory sequence. For instance:
2923 Where barrier belongs to jump_insn, but the note does not. This can be
2924 created by removing the basic block originally following
2925 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
2927 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
2929 prev
= PREV_INSN (insn
);
2931 switch (NOTE_KIND (insn
))
2933 case NOTE_INSN_BLOCK_END
:
2936 case NOTE_INSN_DELETED
:
2937 case NOTE_INSN_DELETED_LABEL
:
2938 case NOTE_INSN_DELETED_DEBUG_LABEL
:
2941 reorder_insns (insn
, insn
, last_insn
);
2948 /* Locate or create a label for a given basic block. */
2951 label_for_bb (basic_block bb
)
2953 rtx label
= BB_HEAD (bb
);
2955 if (!LABEL_P (label
))
2958 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
2960 label
= block_label (bb
);
2966 /* Locate the effective beginning and end of the insn chain for each
2967 block, as defined by skip_insns_after_block above. */
2970 record_effective_endpoints (void)
2976 for (insn
= get_insns ();
2979 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
2980 insn
= NEXT_INSN (insn
))
2982 /* No basic blocks at all? */
2985 if (PREV_INSN (insn
))
2986 cfg_layout_function_header
=
2987 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
2989 cfg_layout_function_header
= NULL_RTX
;
2991 next_insn
= get_insns ();
2996 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
2997 BB_HEADER (bb
) = unlink_insn_chain (next_insn
,
2998 PREV_INSN (BB_HEAD (bb
)));
2999 end
= skip_insns_after_block (bb
);
3000 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
3001 BB_FOOTER (bb
) = unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
3002 next_insn
= NEXT_INSN (BB_END (bb
));
3005 cfg_layout_function_footer
= next_insn
;
3006 if (cfg_layout_function_footer
)
3007 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
3011 into_cfg_layout_mode (void)
3013 cfg_layout_initialize (0);
3018 outof_cfg_layout_mode (void)
3023 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
3024 bb
->aux
= bb
->next_bb
;
3026 cfg_layout_finalize ();
3031 struct rtl_opt_pass pass_into_cfg_layout_mode
=
3035 "into_cfglayout", /* name */
3036 OPTGROUP_NONE
, /* optinfo_flags */
3038 into_cfg_layout_mode
, /* execute */
3041 0, /* static_pass_number */
3043 0, /* properties_required */
3044 PROP_cfglayout
, /* properties_provided */
3045 0, /* properties_destroyed */
3046 0, /* todo_flags_start */
3047 0 /* todo_flags_finish */
3051 struct rtl_opt_pass pass_outof_cfg_layout_mode
=
3055 "outof_cfglayout", /* name */
3056 OPTGROUP_NONE
, /* optinfo_flags */
3058 outof_cfg_layout_mode
, /* execute */
3061 0, /* static_pass_number */
3063 0, /* properties_required */
3064 0, /* properties_provided */
3065 PROP_cfglayout
, /* properties_destroyed */
3066 0, /* todo_flags_start */
3067 0 /* todo_flags_finish */
3072 /* Link the basic blocks in the correct order, compacting the basic
3073 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3074 function also clears the basic block header and footer fields.
3076 This function is usually called after a pass (e.g. tracer) finishes
3077 some transformations while in cfglayout mode. The required sequence
3078 of the basic blocks is in a linked list along the bb->aux field.
3079 This functions re-links the basic block prev_bb and next_bb pointers
3080 accordingly, and it compacts and renumbers the blocks.
3082 FIXME: This currently works only for RTL, but the only RTL-specific
3083 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3084 to GIMPLE a long time ago, but it doesn't relink the basic block
3085 chain. It could do that (to give better initial RTL) if this function
3086 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3089 relink_block_chain (bool stay_in_cfglayout_mode
)
3091 basic_block bb
, prev_bb
;
3094 /* Maybe dump the re-ordered sequence. */
3097 fprintf (dump_file
, "Reordered sequence:\n");
3098 for (bb
= ENTRY_BLOCK_PTR
->next_bb
, index
= NUM_FIXED_BLOCKS
;
3100 bb
= (basic_block
) bb
->aux
, index
++)
3102 fprintf (dump_file
, " %i ", index
);
3103 if (get_bb_original (bb
))
3104 fprintf (dump_file
, "duplicate of %i ",
3105 get_bb_original (bb
)->index
);
3106 else if (forwarder_block_p (bb
)
3107 && !LABEL_P (BB_HEAD (bb
)))
3108 fprintf (dump_file
, "compensation ");
3110 fprintf (dump_file
, "bb %i ", bb
->index
);
3111 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
3115 /* Now reorder the blocks. */
3116 prev_bb
= ENTRY_BLOCK_PTR
;
3117 bb
= ENTRY_BLOCK_PTR
->next_bb
;
3118 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
3120 bb
->prev_bb
= prev_bb
;
3121 prev_bb
->next_bb
= bb
;
3123 prev_bb
->next_bb
= EXIT_BLOCK_PTR
;
3124 EXIT_BLOCK_PTR
->prev_bb
= prev_bb
;
3126 /* Then, clean up the aux fields. */
3130 if (!stay_in_cfglayout_mode
)
3131 BB_HEADER (bb
) = BB_FOOTER (bb
) = NULL
;
3134 /* Maybe reset the original copy tables, they are not valid anymore
3135 when we renumber the basic blocks in compact_blocks. If we are
3136 are going out of cfglayout mode, don't re-allocate the tables. */
3137 free_original_copy_tables ();
3138 if (stay_in_cfglayout_mode
)
3139 initialize_original_copy_tables ();
3141 /* Finally, put basic_block_info in the new order. */
3146 /* Given a reorder chain, rearrange the code to match. */
3149 fixup_reorder_chain (void)
3154 if (cfg_layout_function_header
)
3156 set_first_insn (cfg_layout_function_header
);
3157 insn
= cfg_layout_function_header
;
3158 while (NEXT_INSN (insn
))
3159 insn
= NEXT_INSN (insn
);
3162 /* First do the bulk reordering -- rechain the blocks without regard to
3163 the needed changes to jumps and labels. */
3165 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
3170 NEXT_INSN (insn
) = BB_HEADER (bb
);
3172 set_first_insn (BB_HEADER (bb
));
3173 PREV_INSN (BB_HEADER (bb
)) = insn
;
3174 insn
= BB_HEADER (bb
);
3175 while (NEXT_INSN (insn
))
3176 insn
= NEXT_INSN (insn
);
3179 NEXT_INSN (insn
) = BB_HEAD (bb
);
3181 set_first_insn (BB_HEAD (bb
));
3182 PREV_INSN (BB_HEAD (bb
)) = insn
;
3186 NEXT_INSN (insn
) = BB_FOOTER (bb
);
3187 PREV_INSN (BB_FOOTER (bb
)) = insn
;
3188 while (NEXT_INSN (insn
))
3189 insn
= NEXT_INSN (insn
);
3193 NEXT_INSN (insn
) = cfg_layout_function_footer
;
3194 if (cfg_layout_function_footer
)
3195 PREV_INSN (cfg_layout_function_footer
) = insn
;
3197 while (NEXT_INSN (insn
))
3198 insn
= NEXT_INSN (insn
);
3200 set_last_insn (insn
);
3201 #ifdef ENABLE_CHECKING
3202 verify_insn_chain ();
3205 /* Now add jumps and labels as needed to match the blocks new
3208 for (bb
= ENTRY_BLOCK_PTR
->next_bb
; bb
; bb
= (basic_block
) bb
->aux
)
3210 edge e_fall
, e_taken
, e
;
3212 rtx ret_label
= NULL_RTX
;
3213 basic_block nb
, src_bb
;
3216 if (EDGE_COUNT (bb
->succs
) == 0)
3219 /* Find the old fallthru edge, and another non-EH edge for
3221 e_taken
= e_fall
= NULL
;
3223 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3224 if (e
->flags
& EDGE_FALLTHRU
)
3226 else if (! (e
->flags
& EDGE_EH
))
3229 bb_end_insn
= BB_END (bb
);
3230 if (JUMP_P (bb_end_insn
))
3232 ret_label
= JUMP_LABEL (bb_end_insn
);
3233 if (any_condjump_p (bb_end_insn
))
3235 /* This might happen if the conditional jump has side
3236 effects and could therefore not be optimized away.
3237 Make the basic block to end with a barrier in order
3238 to prevent rtl_verify_flow_info from complaining. */
3241 gcc_assert (!onlyjump_p (bb_end_insn
)
3242 || returnjump_p (bb_end_insn
));
3243 BB_FOOTER (bb
) = emit_barrier_after (bb_end_insn
);
3247 /* If the old fallthru is still next, nothing to do. */
3248 if (bb
->aux
== e_fall
->dest
3249 || e_fall
->dest
== EXIT_BLOCK_PTR
)
3252 /* The degenerated case of conditional jump jumping to the next
3253 instruction can happen for jumps with side effects. We need
3254 to construct a forwarder block and this will be done just
3255 fine by force_nonfallthru below. */
3259 /* There is another special case: if *neither* block is next,
3260 such as happens at the very end of a function, then we'll
3261 need to add a new unconditional jump. Choose the taken
3262 edge based on known or assumed probability. */
3263 else if (bb
->aux
!= e_taken
->dest
)
3265 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
3268 && INTVAL (XEXP (note
, 0)) < REG_BR_PROB_BASE
/ 2
3269 && invert_jump (bb_end_insn
,
3270 (e_fall
->dest
== EXIT_BLOCK_PTR
3272 : label_for_bb (e_fall
->dest
)), 0))
3274 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3275 gcc_checking_assert (could_fall_through
3276 (e_taken
->src
, e_taken
->dest
));
3277 e_taken
->flags
|= EDGE_FALLTHRU
;
3278 update_br_prob_note (bb
);
3279 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
3283 /* If the "jumping" edge is a crossing edge, and the fall
3284 through edge is non-crossing, leave things as they are. */
3285 else if ((e_taken
->flags
& EDGE_CROSSING
)
3286 && !(e_fall
->flags
& EDGE_CROSSING
))
3289 /* Otherwise we can try to invert the jump. This will
3290 basically never fail, however, keep up the pretense. */
3291 else if (invert_jump (bb_end_insn
,
3292 (e_fall
->dest
== EXIT_BLOCK_PTR
3294 : label_for_bb (e_fall
->dest
)), 0))
3296 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3297 gcc_checking_assert (could_fall_through
3298 (e_taken
->src
, e_taken
->dest
));
3299 e_taken
->flags
|= EDGE_FALLTHRU
;
3300 update_br_prob_note (bb
);
3301 if (LABEL_NUSES (ret_label
) == 0
3302 && single_pred_p (e_taken
->dest
))
3303 delete_insn (ret_label
);
3307 else if (extract_asm_operands (PATTERN (bb_end_insn
)) != NULL
)
3309 /* If the old fallthru is still next or if
3310 asm goto doesn't have a fallthru (e.g. when followed by
3311 __builtin_unreachable ()), nothing to do. */
3313 || bb
->aux
== e_fall
->dest
3314 || e_fall
->dest
== EXIT_BLOCK_PTR
)
3317 /* Otherwise we'll have to use the fallthru fixup below. */
3321 /* Otherwise we have some return, switch or computed
3322 jump. In the 99% case, there should not have been a
3324 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
3330 /* No fallthru implies a noreturn function with EH edges, or
3331 something similarly bizarre. In any case, we don't need to
3336 /* If the fallthru block is still next, nothing to do. */
3337 if (bb
->aux
== e_fall
->dest
)
3340 /* A fallthru to exit block. */
3341 if (e_fall
->dest
== EXIT_BLOCK_PTR
)
3345 /* We got here if we need to add a new jump insn.
3346 Note force_nonfallthru can delete E_FALL and thus we have to
3347 save E_FALL->src prior to the call to force_nonfallthru. */
3348 src_bb
= e_fall
->src
;
3349 nb
= force_nonfallthru_and_redirect (e_fall
, e_fall
->dest
, ret_label
);
3354 /* Don't process this new block. */
3357 /* Make sure new bb is tagged for correct section (same as
3358 fall-thru source, since you cannot fall-thru across
3359 section boundaries). */
3360 BB_COPY_PARTITION (src_bb
, single_pred (bb
));
3361 if (flag_reorder_blocks_and_partition
3362 && targetm_common
.have_named_sections
3363 && JUMP_P (BB_END (bb
))
3364 && !any_condjump_p (BB_END (bb
))
3365 && (EDGE_SUCC (bb
, 0)->flags
& EDGE_CROSSING
))
3366 add_reg_note (BB_END (bb
), REG_CROSSING_JUMP
, NULL_RTX
);
3370 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3372 /* Annoying special case - jump around dead jumptables left in the code. */
3375 edge e
= find_fallthru_edge (bb
->succs
);
3377 if (e
&& !can_fallthru (e
->src
, e
->dest
))
3378 force_nonfallthru (e
);
3381 /* Ensure goto_locus from edges has some instructions with that locus
3389 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3390 if (LOCATION_LOCUS (e
->goto_locus
) != UNKNOWN_LOCATION
3391 && !(e
->flags
& EDGE_ABNORMAL
))
3395 basic_block dest
, nb
;
3398 insn
= BB_END (e
->src
);
3399 end
= PREV_INSN (BB_HEAD (e
->src
));
3401 && (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
3402 insn
= PREV_INSN (insn
);
3404 && INSN_LOCATION (insn
) == e
->goto_locus
)
3406 if (simplejump_p (BB_END (e
->src
))
3407 && !INSN_HAS_LOCATION (BB_END (e
->src
)))
3409 INSN_LOCATION (BB_END (e
->src
)) = e
->goto_locus
;
3413 if (dest
== EXIT_BLOCK_PTR
)
3415 /* Non-fallthru edges to the exit block cannot be split. */
3416 if (!(e
->flags
& EDGE_FALLTHRU
))
3421 insn
= BB_HEAD (dest
);
3422 end
= NEXT_INSN (BB_END (dest
));
3423 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
3424 insn
= NEXT_INSN (insn
);
3425 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
3426 && INSN_LOCATION (insn
) == e
->goto_locus
)
3429 nb
= split_edge (e
);
3430 if (!INSN_P (BB_END (nb
)))
3431 BB_END (nb
) = emit_insn_after_noloc (gen_nop (), BB_END (nb
),
3433 INSN_LOCATION (BB_END (nb
)) = e
->goto_locus
;
3435 /* If there are other incoming edges to the destination block
3436 with the same goto locus, redirect them to the new block as
3437 well, this can prevent other such blocks from being created
3438 in subsequent iterations of the loop. */
3439 for (ei2
= ei_start (dest
->preds
); (e2
= ei_safe_edge (ei2
)); )
3440 if (LOCATION_LOCUS (e2
->goto_locus
) != UNKNOWN_LOCATION
3441 && !(e2
->flags
& (EDGE_ABNORMAL
| EDGE_FALLTHRU
))
3442 && e
->goto_locus
== e2
->goto_locus
)
3443 redirect_edge_and_branch (e2
, nb
);
3450 /* Perform sanity checks on the insn chain.
3451 1. Check that next/prev pointers are consistent in both the forward and
3453 2. Count insns in chain, going both directions, and check if equal.
3454 3. Check that get_last_insn () returns the actual end of chain. */
3457 verify_insn_chain (void)
3459 rtx x
, prevx
, nextx
;
3460 int insn_cnt1
, insn_cnt2
;
3462 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
3464 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
3465 gcc_assert (PREV_INSN (x
) == prevx
);
3467 gcc_assert (prevx
== get_last_insn ());
3469 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
3471 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
3472 gcc_assert (NEXT_INSN (x
) == nextx
);
3474 gcc_assert (insn_cnt1
== insn_cnt2
);
3477 /* If we have assembler epilogues, the block falling through to exit must
3478 be the last one in the reordered chain when we reach final. Ensure
3479 that this condition is met. */
3481 fixup_fallthru_exit_predecessor (void)
3484 basic_block bb
= NULL
;
3486 /* This transformation is not valid before reload, because we might
3487 separate a call from the instruction that copies the return
3489 gcc_assert (reload_completed
);
3491 e
= find_fallthru_edge (EXIT_BLOCK_PTR
->preds
);
3497 basic_block c
= ENTRY_BLOCK_PTR
->next_bb
;
3499 /* If the very first block is the one with the fall-through exit
3500 edge, we have to split that block. */
3503 bb
= split_block (bb
, NULL
)->dest
;
3506 BB_FOOTER (bb
) = BB_FOOTER (c
);
3507 BB_FOOTER (c
) = NULL
;
3510 while (c
->aux
!= bb
)
3511 c
= (basic_block
) c
->aux
;
3515 c
= (basic_block
) c
->aux
;
3522 /* In case there are more than one fallthru predecessors of exit, force that
3523 there is only one. */
3526 force_one_exit_fallthru (void)
3528 edge e
, predecessor
= NULL
;
3531 basic_block forwarder
, bb
;
3533 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
3534 if (e
->flags
& EDGE_FALLTHRU
)
3536 if (predecessor
== NULL
)
3548 /* Exit has several fallthru predecessors. Create a forwarder block for
3550 forwarder
= split_edge (predecessor
);
3551 for (ei
= ei_start (EXIT_BLOCK_PTR
->preds
); (e
= ei_safe_edge (ei
)); )
3553 if (e
->src
== forwarder
3554 || !(e
->flags
& EDGE_FALLTHRU
))
3557 redirect_edge_and_branch_force (e
, forwarder
);
3560 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3564 if (bb
->aux
== NULL
&& bb
!= forwarder
)
3566 bb
->aux
= forwarder
;
3572 /* Return true in case it is possible to duplicate the basic block BB. */
3575 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
3577 /* Do not attempt to duplicate tablejumps, as we need to unshare
3578 the dispatch table. This is difficult to do, as the instructions
3579 computing jump destination may be hoisted outside the basic block. */
3580 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
3583 /* Do not duplicate blocks containing insns that can't be copied. */
3584 if (targetm
.cannot_copy_insn_p
)
3586 rtx insn
= BB_HEAD (bb
);
3589 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
3591 if (insn
== BB_END (bb
))
3593 insn
= NEXT_INSN (insn
);
3601 duplicate_insn_chain (rtx from
, rtx to
)
3603 rtx insn
, last
, copy
;
3605 /* Avoid updating of boundaries of previous basic block. The
3606 note will get removed from insn stream in fixup. */
3607 last
= emit_note (NOTE_INSN_DELETED
);
3609 /* Create copy at the end of INSN chain. The chain will
3610 be reordered later. */
3611 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
3613 switch (GET_CODE (insn
))
3616 /* Don't duplicate label debug insns. */
3617 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn
)) == LABEL_DECL
)
3623 /* Avoid copying of dispatch tables. We never duplicate
3624 tablejumps, so this can hit only in case the table got
3625 moved far from original jump. */
3626 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
3627 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
3629 /* Avoid copying following barrier as well if any
3630 (and debug insns in between). */
3633 for (next
= NEXT_INSN (insn
);
3634 next
!= NEXT_INSN (to
);
3635 next
= NEXT_INSN (next
))
3636 if (!DEBUG_INSN_P (next
))
3638 if (next
!= NEXT_INSN (to
) && BARRIER_P (next
))
3642 copy
= emit_copy_of_insn_after (insn
, get_last_insn ());
3643 if (JUMP_P (insn
) && JUMP_LABEL (insn
) != NULL_RTX
3644 && ANY_RETURN_P (JUMP_LABEL (insn
)))
3645 JUMP_LABEL (copy
) = JUMP_LABEL (insn
);
3646 maybe_copy_prologue_epilogue_insn (insn
, copy
);
3657 switch (NOTE_KIND (insn
))
3659 /* In case prologue is empty and function contain label
3660 in first BB, we may want to copy the block. */
3661 case NOTE_INSN_PROLOGUE_END
:
3663 case NOTE_INSN_DELETED
:
3664 case NOTE_INSN_DELETED_LABEL
:
3665 case NOTE_INSN_DELETED_DEBUG_LABEL
:
3666 /* No problem to strip these. */
3667 case NOTE_INSN_FUNCTION_BEG
:
3668 /* There is always just single entry to function. */
3669 case NOTE_INSN_BASIC_BLOCK
:
3672 case NOTE_INSN_EPILOGUE_BEG
:
3673 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
3674 emit_note_copy (insn
);
3678 /* All other notes should have already been eliminated. */
3686 insn
= NEXT_INSN (last
);
3691 /* Create a duplicate of the basic block BB. */
3694 cfg_layout_duplicate_bb (basic_block bb
)
3699 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
3700 new_bb
= create_basic_block (insn
,
3701 insn
? get_last_insn () : NULL
,
3702 EXIT_BLOCK_PTR
->prev_bb
);
3704 BB_COPY_PARTITION (new_bb
, bb
);
3707 insn
= BB_HEADER (bb
);
3708 while (NEXT_INSN (insn
))
3709 insn
= NEXT_INSN (insn
);
3710 insn
= duplicate_insn_chain (BB_HEADER (bb
), insn
);
3712 BB_HEADER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
3717 insn
= BB_FOOTER (bb
);
3718 while (NEXT_INSN (insn
))
3719 insn
= NEXT_INSN (insn
);
3720 insn
= duplicate_insn_chain (BB_FOOTER (bb
), insn
);
3722 BB_FOOTER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
3729 /* Main entry point to this module - initialize the datastructures for
3730 CFG layout changes. It keeps LOOPS up-to-date if not null.
3732 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3735 cfg_layout_initialize (unsigned int flags
)
3740 initialize_original_copy_tables ();
3742 cfg_layout_rtl_register_cfg_hooks ();
3744 record_effective_endpoints ();
3746 /* Make sure that the targets of non local gotos are marked. */
3747 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
3749 bb
= BLOCK_FOR_INSN (XEXP (x
, 0));
3750 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
3753 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
3756 /* Splits superblocks. */
3758 break_superblocks (void)
3760 sbitmap superblocks
;
3764 superblocks
= sbitmap_alloc (last_basic_block
);
3765 bitmap_clear (superblocks
);
3768 if (bb
->flags
& BB_SUPERBLOCK
)
3770 bb
->flags
&= ~BB_SUPERBLOCK
;
3771 bitmap_set_bit (superblocks
, bb
->index
);
3777 rebuild_jump_labels (get_insns ());
3778 find_many_sub_basic_blocks (superblocks
);
3784 /* Finalize the changes: reorder insn list according to the sequence specified
3785 by aux pointers, enter compensation code, rebuild scope forest. */
3788 cfg_layout_finalize (void)
3790 #ifdef ENABLE_CHECKING
3791 verify_flow_info ();
3793 force_one_exit_fallthru ();
3794 rtl_register_cfg_hooks ();
3795 if (reload_completed
3796 #ifdef HAVE_epilogue
3800 fixup_fallthru_exit_predecessor ();
3801 fixup_reorder_chain ();
3803 rebuild_jump_labels (get_insns ());
3804 delete_dead_jumptables ();
3806 #ifdef ENABLE_CHECKING
3807 verify_insn_chain ();
3808 verify_flow_info ();
3813 /* Same as split_block but update cfg_layout structures. */
3816 cfg_layout_split_block (basic_block bb
, void *insnp
)
3818 rtx insn
= (rtx
) insnp
;
3819 basic_block new_bb
= rtl_split_block (bb
, insn
);
3821 BB_FOOTER (new_bb
) = BB_FOOTER (bb
);
3822 BB_FOOTER (bb
) = NULL
;
3827 /* Redirect Edge to DEST. */
3829 cfg_layout_redirect_edge_and_branch (edge e
, basic_block dest
)
3831 basic_block src
= e
->src
;
3834 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
3837 if (e
->dest
== dest
)
3840 if (e
->src
!= ENTRY_BLOCK_PTR
3841 && (ret
= try_redirect_by_replacing_jump (e
, dest
, true)))
3843 df_set_bb_dirty (src
);
3847 if (e
->src
== ENTRY_BLOCK_PTR
3848 && (e
->flags
& EDGE_FALLTHRU
) && !(e
->flags
& EDGE_COMPLEX
))
3851 fprintf (dump_file
, "Redirecting entry edge from bb %i to %i\n",
3852 e
->src
->index
, dest
->index
);
3854 df_set_bb_dirty (e
->src
);
3855 redirect_edge_succ (e
, dest
);
3859 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3860 in the case the basic block appears to be in sequence. Avoid this
3863 if (e
->flags
& EDGE_FALLTHRU
)
3865 /* Redirect any branch edges unified with the fallthru one. */
3866 if (JUMP_P (BB_END (src
))
3867 && label_is_jump_target_p (BB_HEAD (e
->dest
),
3873 fprintf (dump_file
, "Fallthru edge unified with branch "
3874 "%i->%i redirected to %i\n",
3875 e
->src
->index
, e
->dest
->index
, dest
->index
);
3876 e
->flags
&= ~EDGE_FALLTHRU
;
3877 redirected
= redirect_branch_edge (e
, dest
);
3878 gcc_assert (redirected
);
3879 redirected
->flags
|= EDGE_FALLTHRU
;
3880 df_set_bb_dirty (redirected
->src
);
3883 /* In case we are redirecting fallthru edge to the branch edge
3884 of conditional jump, remove it. */
3885 if (EDGE_COUNT (src
->succs
) == 2)
3887 /* Find the edge that is different from E. */
3888 edge s
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
);
3891 && any_condjump_p (BB_END (src
))
3892 && onlyjump_p (BB_END (src
)))
3893 delete_insn (BB_END (src
));
3896 fprintf (dump_file
, "Redirecting fallthru edge %i->%i to %i\n",
3897 e
->src
->index
, e
->dest
->index
, dest
->index
);
3898 ret
= redirect_edge_succ_nodup (e
, dest
);
3901 ret
= redirect_branch_edge (e
, dest
);
3903 /* We don't want simplejumps in the insn stream during cfglayout. */
3904 gcc_assert (!simplejump_p (BB_END (src
)));
3906 df_set_bb_dirty (src
);
3910 /* Simple wrapper as we always can redirect fallthru edges. */
3912 cfg_layout_redirect_edge_and_branch_force (edge e
, basic_block dest
)
3914 edge redirected
= cfg_layout_redirect_edge_and_branch (e
, dest
);
3916 gcc_assert (redirected
);
3920 /* Same as delete_basic_block but update cfg_layout structures. */
3923 cfg_layout_delete_block (basic_block bb
)
3925 rtx insn
, next
, prev
= PREV_INSN (BB_HEAD (bb
)), *to
, remaints
;
3929 next
= BB_HEAD (bb
);
3931 NEXT_INSN (prev
) = BB_HEADER (bb
);
3933 set_first_insn (BB_HEADER (bb
));
3934 PREV_INSN (BB_HEADER (bb
)) = prev
;
3935 insn
= BB_HEADER (bb
);
3936 while (NEXT_INSN (insn
))
3937 insn
= NEXT_INSN (insn
);
3938 NEXT_INSN (insn
) = next
;
3939 PREV_INSN (next
) = insn
;
3941 next
= NEXT_INSN (BB_END (bb
));
3944 insn
= BB_FOOTER (bb
);
3947 if (BARRIER_P (insn
))
3949 if (PREV_INSN (insn
))
3950 NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
3952 BB_FOOTER (bb
) = NEXT_INSN (insn
);
3953 if (NEXT_INSN (insn
))
3954 PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
3958 insn
= NEXT_INSN (insn
);
3963 NEXT_INSN (insn
) = BB_FOOTER (bb
);
3964 PREV_INSN (BB_FOOTER (bb
)) = insn
;
3965 while (NEXT_INSN (insn
))
3966 insn
= NEXT_INSN (insn
);
3967 NEXT_INSN (insn
) = next
;
3969 PREV_INSN (next
) = insn
;
3971 set_last_insn (insn
);
3974 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
3975 to
= &BB_HEADER (bb
->next_bb
);
3977 to
= &cfg_layout_function_footer
;
3979 rtl_delete_block (bb
);
3982 prev
= NEXT_INSN (prev
);
3984 prev
= get_insns ();
3986 next
= PREV_INSN (next
);
3988 next
= get_last_insn ();
3990 if (next
&& NEXT_INSN (next
) != prev
)
3992 remaints
= unlink_insn_chain (prev
, next
);
3994 while (NEXT_INSN (insn
))
3995 insn
= NEXT_INSN (insn
);
3996 NEXT_INSN (insn
) = *to
;
3998 PREV_INSN (*to
) = insn
;
4003 /* Return true when blocks A and B can be safely merged. */
4006 cfg_layout_can_merge_blocks_p (basic_block a
, basic_block b
)
4008 /* If we are partitioning hot/cold basic blocks, we don't want to
4009 mess up unconditional or indirect jumps that cross between hot
4012 Basic block partitioning may result in some jumps that appear to
4013 be optimizable (or blocks that appear to be mergeable), but which really
4014 must be left untouched (they are required to make it safely across
4015 partition boundaries). See the comments at the top of
4016 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4018 if (BB_PARTITION (a
) != BB_PARTITION (b
))
4021 /* Protect the loop latches. */
4022 if (current_loops
&& b
->loop_father
->latch
== b
)
4025 /* If we would end up moving B's instructions, make sure it doesn't fall
4026 through into the exit block, since we cannot recover from a fallthrough
4027 edge into the exit block occurring in the middle of a function. */
4028 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4030 edge e
= find_fallthru_edge (b
->succs
);
4031 if (e
&& e
->dest
== EXIT_BLOCK_PTR
)
4035 /* There must be exactly one edge in between the blocks. */
4036 return (single_succ_p (a
)
4037 && single_succ (a
) == b
4038 && single_pred_p (b
) == 1
4040 /* Must be simple edge. */
4041 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
4042 && a
!= ENTRY_BLOCK_PTR
&& b
!= EXIT_BLOCK_PTR
4043 /* If the jump insn has side effects, we can't kill the edge.
4044 When not optimizing, try_redirect_by_replacing_jump will
4045 not allow us to redirect an edge by replacing a table jump. */
4046 && (!JUMP_P (BB_END (a
))
4047 || ((!optimize
|| reload_completed
)
4048 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
4051 /* Merge block A and B. The blocks must be mergeable. */
4054 cfg_layout_merge_blocks (basic_block a
, basic_block b
)
4056 bool forwarder_p
= (b
->flags
& BB_FORWARDER_BLOCK
) != 0;
4059 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a
, b
));
4062 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
4065 /* If there was a CODE_LABEL beginning B, delete it. */
4066 if (LABEL_P (BB_HEAD (b
)))
4068 delete_insn (BB_HEAD (b
));
4071 /* We should have fallthru edge in a, or we can do dummy redirection to get
4073 if (JUMP_P (BB_END (a
)))
4074 try_redirect_by_replacing_jump (EDGE_SUCC (a
, 0), b
, true);
4075 gcc_assert (!JUMP_P (BB_END (a
)));
4077 /* When not optimizing CFG and the edge is the only place in RTL which holds
4078 some unique locus, emit a nop with that locus in between. */
4080 emit_nop_for_unique_locus_between (a
, b
);
4082 /* Possible line number notes should appear in between. */
4085 rtx first
= BB_END (a
), last
;
4087 last
= emit_insn_after_noloc (BB_HEADER (b
), BB_END (a
), a
);
4088 /* The above might add a BARRIER as BB_END, but as barriers
4089 aren't valid parts of a bb, remove_insn doesn't update
4090 BB_END if it is a barrier. So adjust BB_END here. */
4091 while (BB_END (a
) != first
&& BARRIER_P (BB_END (a
)))
4092 BB_END (a
) = PREV_INSN (BB_END (a
));
4093 delete_insn_chain (NEXT_INSN (first
), last
, false);
4094 BB_HEADER (b
) = NULL
;
4097 /* In the case basic blocks are not adjacent, move them around. */
4098 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4100 insn
= unlink_insn_chain (BB_HEAD (b
), BB_END (b
));
4102 emit_insn_after_noloc (insn
, BB_END (a
), a
);
4104 /* Otherwise just re-associate the instructions. */
4108 BB_END (a
) = BB_END (b
);
4111 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4112 We need to explicitly call. */
4113 update_bb_for_insn_chain (insn
, BB_END (b
), a
);
4115 /* Skip possible DELETED_LABEL insn. */
4116 if (!NOTE_INSN_BASIC_BLOCK_P (insn
))
4117 insn
= NEXT_INSN (insn
);
4118 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
4122 df_bb_delete (b
->index
);
4124 /* Possible tablejumps and barriers should appear after the block. */
4128 BB_FOOTER (a
) = BB_FOOTER (b
);
4131 rtx last
= BB_FOOTER (a
);
4133 while (NEXT_INSN (last
))
4134 last
= NEXT_INSN (last
);
4135 NEXT_INSN (last
) = BB_FOOTER (b
);
4136 PREV_INSN (BB_FOOTER (b
)) = last
;
4138 BB_FOOTER (b
) = NULL
;
4141 /* If B was a forwarder block, propagate the locus on the edge. */
4143 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
)
4144 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
4147 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
4153 cfg_layout_split_edge (edge e
)
4155 basic_block new_bb
=
4156 create_basic_block (e
->src
!= ENTRY_BLOCK_PTR
4157 ? NEXT_INSN (BB_END (e
->src
)) : get_insns (),
4160 if (e
->dest
== EXIT_BLOCK_PTR
)
4161 BB_COPY_PARTITION (new_bb
, e
->src
);
4163 BB_COPY_PARTITION (new_bb
, e
->dest
);
4164 make_edge (new_bb
, e
->dest
, EDGE_FALLTHRU
);
4165 redirect_edge_and_branch_force (e
, new_bb
);
4170 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4173 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED
)
4177 /* Return true if BB contains only labels or non-executable
4181 rtl_block_empty_p (basic_block bb
)
4185 if (bb
== ENTRY_BLOCK_PTR
|| bb
== EXIT_BLOCK_PTR
)
4188 FOR_BB_INSNS (bb
, insn
)
4189 if (NONDEBUG_INSN_P (insn
) && !any_uncondjump_p (insn
))
4195 /* Split a basic block if it ends with a conditional branch and if
4196 the other part of the block is not empty. */
4199 rtl_split_block_before_cond_jump (basic_block bb
)
4202 rtx split_point
= NULL
;
4204 bool found_code
= false;
4206 FOR_BB_INSNS (bb
, insn
)
4208 if (any_condjump_p (insn
))
4210 else if (NONDEBUG_INSN_P (insn
))
4215 /* Did not find everything. */
4216 if (found_code
&& split_point
)
4217 return split_block (bb
, split_point
)->dest
;
4222 /* Return 1 if BB ends with a call, possibly followed by some
4223 instructions that must stay with the call, 0 otherwise. */
4226 rtl_block_ends_with_call_p (basic_block bb
)
4228 rtx insn
= BB_END (bb
);
4230 while (!CALL_P (insn
)
4231 && insn
!= BB_HEAD (bb
)
4232 && (keep_with_call_p (insn
)
4234 || DEBUG_INSN_P (insn
)))
4235 insn
= PREV_INSN (insn
);
4236 return (CALL_P (insn
));
4239 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4242 rtl_block_ends_with_condjump_p (const_basic_block bb
)
4244 return any_condjump_p (BB_END (bb
));
4247 /* Return true if we need to add fake edge to exit.
4248 Helper function for rtl_flow_call_edges_add. */
4251 need_fake_edge_p (const_rtx insn
)
4257 && !SIBLING_CALL_P (insn
)
4258 && !find_reg_note (insn
, REG_NORETURN
, NULL
)
4259 && !(RTL_CONST_OR_PURE_CALL_P (insn
))))
4262 return ((GET_CODE (PATTERN (insn
)) == ASM_OPERANDS
4263 && MEM_VOLATILE_P (PATTERN (insn
)))
4264 || (GET_CODE (PATTERN (insn
)) == PARALLEL
4265 && asm_noperands (insn
) != -1
4266 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn
), 0, 0)))
4267 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
4270 /* Add fake edges to the function exit for any non constant and non noreturn
4271 calls, volatile inline assembly in the bitmap of blocks specified by
4272 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4275 The goal is to expose cases in which entering a basic block does not imply
4276 that all subsequent instructions must be executed. */
4279 rtl_flow_call_edges_add (sbitmap blocks
)
4282 int blocks_split
= 0;
4283 int last_bb
= last_basic_block
;
4284 bool check_last_block
= false;
4286 if (n_basic_blocks
== NUM_FIXED_BLOCKS
)
4290 check_last_block
= true;
4292 check_last_block
= bitmap_bit_p (blocks
, EXIT_BLOCK_PTR
->prev_bb
->index
);
4294 /* In the last basic block, before epilogue generation, there will be
4295 a fallthru edge to EXIT. Special care is required if the last insn
4296 of the last basic block is a call because make_edge folds duplicate
4297 edges, which would result in the fallthru edge also being marked
4298 fake, which would result in the fallthru edge being removed by
4299 remove_fake_edges, which would result in an invalid CFG.
4301 Moreover, we can't elide the outgoing fake edge, since the block
4302 profiler needs to take this into account in order to solve the minimal
4303 spanning tree in the case that the call doesn't return.
4305 Handle this by adding a dummy instruction in a new last basic block. */
4306 if (check_last_block
)
4308 basic_block bb
= EXIT_BLOCK_PTR
->prev_bb
;
4309 rtx insn
= BB_END (bb
);
4311 /* Back up past insns that must be kept in the same block as a call. */
4312 while (insn
!= BB_HEAD (bb
)
4313 && keep_with_call_p (insn
))
4314 insn
= PREV_INSN (insn
);
4316 if (need_fake_edge_p (insn
))
4320 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
4323 insert_insn_on_edge (gen_use (const0_rtx
), e
);
4324 commit_edge_insertions ();
4329 /* Now add fake edges to the function exit for any non constant
4330 calls since there is no way that we can determine if they will
4333 for (i
= NUM_FIXED_BLOCKS
; i
< last_bb
; i
++)
4335 basic_block bb
= BASIC_BLOCK (i
);
4342 if (blocks
&& !bitmap_bit_p (blocks
, i
))
4345 for (insn
= BB_END (bb
); ; insn
= prev_insn
)
4347 prev_insn
= PREV_INSN (insn
);
4348 if (need_fake_edge_p (insn
))
4351 rtx split_at_insn
= insn
;
4353 /* Don't split the block between a call and an insn that should
4354 remain in the same block as the call. */
4356 while (split_at_insn
!= BB_END (bb
)
4357 && keep_with_call_p (NEXT_INSN (split_at_insn
)))
4358 split_at_insn
= NEXT_INSN (split_at_insn
);
4360 /* The handling above of the final block before the epilogue
4361 should be enough to verify that there is no edge to the exit
4362 block in CFG already. Calling make_edge in such case would
4363 cause us to mark that edge as fake and remove it later. */
4365 #ifdef ENABLE_CHECKING
4366 if (split_at_insn
== BB_END (bb
))
4368 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
4369 gcc_assert (e
== NULL
);
4373 /* Note that the following may create a new basic block
4374 and renumber the existing basic blocks. */
4375 if (split_at_insn
!= BB_END (bb
))
4377 e
= split_block (bb
, split_at_insn
);
4382 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
4385 if (insn
== BB_HEAD (bb
))
4391 verify_flow_info ();
4393 return blocks_split
;
4396 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4397 the conditional branch target, SECOND_HEAD should be the fall-thru
4398 there is no need to handle this here the loop versioning code handles
4399 this. the reason for SECON_HEAD is that it is needed for condition
4400 in trees, and this should be of the same type since it is a hook. */
4402 rtl_lv_add_condition_to_bb (basic_block first_head
,
4403 basic_block second_head ATTRIBUTE_UNUSED
,
4404 basic_block cond_bb
, void *comp_rtx
)
4406 rtx label
, seq
, jump
;
4407 rtx op0
= XEXP ((rtx
)comp_rtx
, 0);
4408 rtx op1
= XEXP ((rtx
)comp_rtx
, 1);
4409 enum rtx_code comp
= GET_CODE ((rtx
)comp_rtx
);
4410 enum machine_mode mode
;
4413 label
= block_label (first_head
);
4414 mode
= GET_MODE (op0
);
4415 if (mode
== VOIDmode
)
4416 mode
= GET_MODE (op1
);
4419 op0
= force_operand (op0
, NULL_RTX
);
4420 op1
= force_operand (op1
, NULL_RTX
);
4421 do_compare_rtx_and_jump (op0
, op1
, comp
, 0,
4422 mode
, NULL_RTX
, NULL_RTX
, label
, -1);
4423 jump
= get_last_insn ();
4424 JUMP_LABEL (jump
) = label
;
4425 LABEL_NUSES (label
)++;
4429 /* Add the new cond , in the new head. */
4430 emit_insn_after(seq
, BB_END(cond_bb
));
4434 /* Given a block B with unconditional branch at its end, get the
4435 store the return the branch edge and the fall-thru edge in
4436 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4438 rtl_extract_cond_bb_edges (basic_block b
, edge
*branch_edge
,
4439 edge
*fallthru_edge
)
4441 edge e
= EDGE_SUCC (b
, 0);
4443 if (e
->flags
& EDGE_FALLTHRU
)
4446 *branch_edge
= EDGE_SUCC (b
, 1);
4451 *fallthru_edge
= EDGE_SUCC (b
, 1);
4456 init_rtl_bb_info (basic_block bb
)
4458 gcc_assert (!bb
->il
.x
.rtl
);
4459 bb
->il
.x
.head_
= NULL
;
4460 bb
->il
.x
.rtl
= ggc_alloc_cleared_rtl_bb_info ();
4463 /* Returns true if it is possible to remove edge E by redirecting
4464 it to the destination of the other edge from E->src. */
4467 rtl_can_remove_branch_p (const_edge e
)
4469 const_basic_block src
= e
->src
;
4470 const_basic_block target
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
;
4471 const_rtx insn
= BB_END (src
), set
;
4473 /* The conditions are taken from try_redirect_by_replacing_jump. */
4474 if (target
== EXIT_BLOCK_PTR
)
4477 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4480 if (find_reg_note (insn
, REG_CROSSING_JUMP
, NULL_RTX
)
4481 || BB_PARTITION (src
) != BB_PARTITION (target
))
4484 if (!onlyjump_p (insn
)
4485 || tablejump_p (insn
, NULL
, NULL
))
4488 set
= single_set (insn
);
4489 if (!set
|| side_effects_p (set
))
4496 rtl_duplicate_bb (basic_block bb
)
4498 bb
= cfg_layout_duplicate_bb (bb
);
4503 /* Do book-keeping of basic block BB for the profile consistency checker.
4504 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4505 then do post-pass accounting. Store the counting in RECORD. */
4507 rtl_account_profile_record (basic_block bb
, int after_pass
,
4508 struct profile_record
*record
)
4511 FOR_BB_INSNS (bb
, insn
)
4514 record
->size
[after_pass
]
4515 += insn_rtx_cost (PATTERN (insn
), false);
4516 if (profile_status
== PROFILE_READ
)
4517 record
->time
[after_pass
]
4518 += insn_rtx_cost (PATTERN (insn
), true) * bb
->count
;
4519 else if (profile_status
== PROFILE_GUESSED
)
4520 record
->time
[after_pass
]
4521 += insn_rtx_cost (PATTERN (insn
), true) * bb
->frequency
;
/* NOTE(review): this is a positional aggregate initializer — each entry
   must line up, in order, with the corresponding field of struct cfg_hooks
   (declared in cfghooks.h).  Do not reorder entries without checking the
   struct declaration.  Several slots (e.g. between the numbered entries
   below) appear to be filled by identifiers not visible in this chunk —
   TODO confirm against cfghooks.h before editing.  */
4525 /* Implementation of CFG manipulation for linearized RTL. */
4526 struct cfg_hooks rtl_cfg_hooks
= {
4528 rtl_verify_flow_info
,
4530 rtl_dump_bb_for_graph
,
4531 rtl_create_basic_block
,
4532 rtl_redirect_edge_and_branch
,
4533 rtl_redirect_edge_and_branch_force
,
4534 rtl_can_remove_branch_p
,
4537 rtl_move_block_after
,
4538 rtl_can_merge_blocks
, /* can_merge_blocks_p */
4542 cfg_layout_can_duplicate_bb_p
,
4545 rtl_make_forwarder_block
,
4546 rtl_tidy_fallthru_edge
,
4547 rtl_force_nonfallthru
,
4548 rtl_block_ends_with_call_p
,
4549 rtl_block_ends_with_condjump_p
,
4550 rtl_flow_call_edges_add
,
4551 NULL
, /* execute_on_growing_pred */
4552 NULL
, /* execute_on_shrinking_pred */
4553 NULL
, /* duplicate loop for trees */
4554 NULL
, /* lv_add_condition_to_bb */
4555 NULL
, /* lv_adjust_loop_header_phi*/
4556 NULL
, /* extract_cond_bb_edges */
4557 NULL
, /* flush_pending_stmts */
4558 rtl_block_empty_p
, /* block_empty_p */
4559 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
4560 rtl_account_profile_record
,
/* NOTE(review): positional aggregate initializer for struct cfg_hooks
   (cfghooks.h), cfglayout-mode variant.  Entry order is significant and
   must match the struct's field order exactly.  Where this table differs
   from rtl_cfg_hooks above, the cfg_layout_* hook replaces the plain RTL
   one (creation, redirection, deletion, splitting, merging, duplication),
   and the loop-versioning hooks (duplicate_loop_to_header_edge,
   rtl_lv_add_condition_to_bb, rtl_extract_cond_bb_edges) are provided
   instead of NULL.  */
4563 /* Implementation of CFG manipulation for cfg layout RTL, where
4564 basic block connected via fallthru edges does not have to be adjacent.
4565 This representation will hopefully become the default one in future
4566 version of the compiler. */
4568 struct cfg_hooks cfg_layout_rtl_cfg_hooks
= {
4570 rtl_verify_flow_info_1
,
4572 rtl_dump_bb_for_graph
,
4573 cfg_layout_create_basic_block
,
4574 cfg_layout_redirect_edge_and_branch
,
4575 cfg_layout_redirect_edge_and_branch_force
,
4576 rtl_can_remove_branch_p
,
4577 cfg_layout_delete_block
,
4578 cfg_layout_split_block
,
4579 rtl_move_block_after
,
4580 cfg_layout_can_merge_blocks_p
,
4581 cfg_layout_merge_blocks
,
4584 cfg_layout_can_duplicate_bb_p
,
4585 cfg_layout_duplicate_bb
,
4586 cfg_layout_split_edge
,
4587 rtl_make_forwarder_block
,
4588 NULL
, /* tidy_fallthru_edge */
4589 rtl_force_nonfallthru
,
4590 rtl_block_ends_with_call_p
,
4591 rtl_block_ends_with_condjump_p
,
4592 rtl_flow_call_edges_add
,
4593 NULL
, /* execute_on_growing_pred */
4594 NULL
, /* execute_on_shrinking_pred */
4595 duplicate_loop_to_header_edge
, /* duplicate loop for trees */
4596 rtl_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
4597 NULL
, /* lv_adjust_loop_header_phi*/
4598 rtl_extract_cond_bb_edges
, /* extract_cond_bb_edges */
4599 NULL
, /* flush_pending_stmts */
4600 rtl_block_empty_p
, /* block_empty_p */
4601 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
4602 rtl_account_profile_record
,
4605 #include "gt-cfgrtl.h"