1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
42 #include "coretypes.h"
47 #include "double-int.h"
54 #include "hard-reg-set.h"
58 #include "dominance.h"
63 #include "cfgcleanup.h"
64 #include "basic-block.h"
65 #include "bb-reorder.h"
69 #include "rtl-error.h"
72 #include "insn-attr.h"
73 #include "insn-config.h"
76 #include "common/common-target.h"
79 #include "tree-pass.h"
82 /* Holds the interesting leading and trailing notes for the function.
83 Only applicable if the CFG is in cfglayout mode. */
84 static GTY(()) rtx_insn
*cfg_layout_function_footer
;
85 static GTY(()) rtx_insn
*cfg_layout_function_header
;
87 static rtx_insn
*skip_insns_after_block (basic_block
);
88 static void record_effective_endpoints (void);
89 static rtx
label_for_bb (basic_block
);
90 static void fixup_reorder_chain (void);
92 void verify_insn_chain (void);
93 static void fixup_fallthru_exit_predecessor (void);
94 static int can_delete_note_p (const rtx_note
*);
95 static int can_delete_label_p (const rtx_code_label
*);
96 static basic_block
rtl_split_edge (edge
);
97 static bool rtl_move_block_after (basic_block
, basic_block
);
98 static int rtl_verify_flow_info (void);
99 static basic_block
cfg_layout_split_block (basic_block
, void *);
100 static edge
cfg_layout_redirect_edge_and_branch (edge
, basic_block
);
101 static basic_block
cfg_layout_redirect_edge_and_branch_force (edge
, basic_block
);
102 static void cfg_layout_delete_block (basic_block
);
103 static void rtl_delete_block (basic_block
);
104 static basic_block
rtl_redirect_edge_and_branch_force (edge
, basic_block
);
105 static edge
rtl_redirect_edge_and_branch (edge
, basic_block
);
106 static basic_block
rtl_split_block (basic_block
, void *);
107 static void rtl_dump_bb (FILE *, basic_block
, int, int);
108 static int rtl_verify_flow_info_1 (void);
109 static void rtl_make_forwarder_block (edge
);
111 /* Return true if NOTE is not one of the ones that must be kept paired,
112 so that we may simply delete it. */
115 can_delete_note_p (const rtx_note
*note
)
117 switch (NOTE_KIND (note
))
119 case NOTE_INSN_DELETED
:
120 case NOTE_INSN_BASIC_BLOCK
:
121 case NOTE_INSN_EPILOGUE_BEG
:
129 /* True if a given label can be deleted. */
132 can_delete_label_p (const rtx_code_label
*label
)
134 return (!LABEL_PRESERVE_P (label
)
135 /* User declared labels must be preserved. */
136 && LABEL_NAME (label
) == 0
137 && !in_expr_list_p (forced_labels
, label
));
140 /* Delete INSN by patching it out. */
143 delete_insn (rtx uncast_insn
)
145 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
147 bool really_delete
= true;
151 /* Some labels can't be directly removed from the INSN chain, as they
152 might be references via variables, constant pool etc.
153 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
154 if (! can_delete_label_p (as_a
<rtx_code_label
*> (insn
)))
156 const char *name
= LABEL_NAME (insn
);
157 basic_block bb
= BLOCK_FOR_INSN (insn
);
158 rtx_insn
*bb_note
= NEXT_INSN (insn
);
160 really_delete
= false;
161 PUT_CODE (insn
, NOTE
);
162 NOTE_KIND (insn
) = NOTE_INSN_DELETED_LABEL
;
163 NOTE_DELETED_LABEL_NAME (insn
) = name
;
165 /* If the note following the label starts a basic block, and the
166 label is a member of the same basic block, interchange the two. */
167 if (bb_note
!= NULL_RTX
168 && NOTE_INSN_BASIC_BLOCK_P (bb_note
)
170 && bb
== BLOCK_FOR_INSN (bb_note
))
172 reorder_insns_nobb (insn
, insn
, bb_note
);
173 BB_HEAD (bb
) = bb_note
;
174 if (BB_END (bb
) == bb_note
)
179 remove_node_from_insn_list (insn
, &nonlocal_goto_handler_labels
);
184 /* If this insn has already been deleted, something is very wrong. */
185 gcc_assert (!insn
->deleted ());
187 df_insn_delete (insn
);
189 insn
->set_deleted ();
192 /* If deleting a jump, decrement the use count of the label. Deleting
193 the label itself should happen in the normal course of block merging. */
196 if (JUMP_LABEL (insn
)
197 && LABEL_P (JUMP_LABEL (insn
)))
198 LABEL_NUSES (JUMP_LABEL (insn
))--;
200 /* If there are more targets, remove them too. */
202 = find_reg_note (insn
, REG_LABEL_TARGET
, NULL_RTX
)) != NULL_RTX
203 && LABEL_P (XEXP (note
, 0)))
205 LABEL_NUSES (XEXP (note
, 0))--;
206 remove_note (insn
, note
);
210 /* Also if deleting any insn that references a label as an operand. */
211 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, NULL_RTX
)) != NULL_RTX
212 && LABEL_P (XEXP (note
, 0)))
214 LABEL_NUSES (XEXP (note
, 0))--;
215 remove_note (insn
, note
);
218 if (rtx_jump_table_data
*table
= dyn_cast
<rtx_jump_table_data
*> (insn
))
220 rtvec vec
= table
->get_labels ();
221 int len
= GET_NUM_ELEM (vec
);
224 for (i
= 0; i
< len
; i
++)
226 rtx label
= XEXP (RTVEC_ELT (vec
, i
), 0);
228 /* When deleting code in bulk (e.g. removing many unreachable
229 blocks) we can delete a label that's a target of the vector
230 before deleting the vector itself. */
232 LABEL_NUSES (label
)--;
237 /* Like delete_insn but also purge dead edges from BB. */
240 delete_insn_and_edges (rtx_insn
*insn
)
245 && BLOCK_FOR_INSN (insn
)
246 && BB_END (BLOCK_FOR_INSN (insn
)) == insn
)
250 purge_dead_edges (BLOCK_FOR_INSN (insn
));
253 /* Unlink a chain of insns between START and FINISH, leaving notes
254 that must be paired. If CLEAR_BB is true, we set bb field for
255 insns that cannot be removed to NULL. */
258 delete_insn_chain (rtx start
, rtx finish
, bool clear_bb
)
260 rtx_insn
*prev
, *current
;
262 /* Unchain the insns one by one. It would be quicker to delete all of these
263 with a single unchaining, rather than one at a time, but we need to keep
265 current
= safe_as_a
<rtx_insn
*> (finish
);
268 prev
= PREV_INSN (current
);
269 if (NOTE_P (current
) && !can_delete_note_p (as_a
<rtx_note
*> (current
)))
272 delete_insn (current
);
274 if (clear_bb
&& !current
->deleted ())
275 set_block_for_insn (current
, NULL
);
277 if (current
== start
)
283 /* Create a new basic block consisting of the instructions between HEAD and END
284 inclusive. This function is designed to allow fast BB construction - reuses
285 the note and basic block struct in BB_NOTE, if any and do not grow
286 BASIC_BLOCK chain and should be used directly only by CFG construction code.
287 END can be NULL in to create new empty basic block before HEAD. Both END
288 and HEAD can be NULL to create basic block at the end of INSN chain.
289 AFTER is the basic block we should be put after. */
292 create_basic_block_structure (rtx_insn
*head
, rtx_insn
*end
, rtx_note
*bb_note
,
298 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
301 /* If we found an existing note, thread it back onto the chain. */
309 after
= PREV_INSN (head
);
313 if (after
!= bb_note
&& NEXT_INSN (after
) != bb_note
)
314 reorder_insns_nobb (bb_note
, bb_note
, after
);
318 /* Otherwise we must create a note and a basic block structure. */
322 init_rtl_bb_info (bb
);
325 = emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
326 else if (LABEL_P (head
) && end
)
328 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
334 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
340 NOTE_BASIC_BLOCK (bb_note
) = bb
;
343 /* Always include the bb note in the block. */
344 if (NEXT_INSN (end
) == bb_note
)
349 bb
->index
= last_basic_block_for_fn (cfun
)++;
350 bb
->flags
= BB_NEW
| BB_RTL
;
351 link_block (bb
, after
);
352 SET_BASIC_BLOCK_FOR_FN (cfun
, bb
->index
, bb
);
353 df_bb_refs_record (bb
->index
, false);
354 update_bb_for_insn (bb
);
355 BB_SET_PARTITION (bb
, BB_UNPARTITIONED
);
357 /* Tag the block so that we know it has been used when considering
358 other basic block notes. */
364 /* Create new basic block consisting of instructions in between HEAD and END
365 and place it to the BB chain after block AFTER. END can be NULL to
366 create a new empty basic block before HEAD. Both END and HEAD can be
367 NULL to create basic block at the end of INSN chain. */
370 rtl_create_basic_block (void *headp
, void *endp
, basic_block after
)
372 rtx_insn
*head
= (rtx_insn
*) headp
;
373 rtx_insn
*end
= (rtx_insn
*) endp
;
376 /* Grow the basic block array if needed. */
377 if ((size_t) last_basic_block_for_fn (cfun
)
378 >= basic_block_info_for_fn (cfun
)->length ())
381 (last_basic_block_for_fn (cfun
)
382 + (last_basic_block_for_fn (cfun
) + 3) / 4);
383 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
386 n_basic_blocks_for_fn (cfun
)++;
388 bb
= create_basic_block_structure (head
, end
, NULL
, after
);
394 cfg_layout_create_basic_block (void *head
, void *end
, basic_block after
)
396 basic_block newbb
= rtl_create_basic_block (head
, end
, after
);
401 /* Delete the insns in a (non-live) block. We physically delete every
402 non-deleted-note insn, and update the flow graph appropriately.
404 Return nonzero if we deleted an exception handler. */
406 /* ??? Preserving all such notes strikes me as wrong. It would be nice
407 to post-process the stream to remove empty blocks, loops, ranges, etc. */
410 rtl_delete_block (basic_block b
)
412 rtx_insn
*insn
, *end
;
414 /* If the head of this block is a CODE_LABEL, then it might be the
415 label for an exception handler which can't be reached. We need
416 to remove the label from the exception_handler_label list. */
419 end
= get_last_bb_insn (b
);
421 /* Selectively delete the entire chain. */
423 delete_insn_chain (insn
, end
, true);
427 fprintf (dump_file
, "deleting block %d\n", b
->index
);
428 df_bb_delete (b
->index
);
431 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
434 compute_bb_for_insn (void)
438 FOR_EACH_BB_FN (bb
, cfun
)
440 rtx_insn
*end
= BB_END (bb
);
443 for (insn
= BB_HEAD (bb
); ; insn
= NEXT_INSN (insn
))
445 BLOCK_FOR_INSN (insn
) = bb
;
452 /* Release the basic_block_for_insn array. */
455 free_bb_for_insn (void)
458 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
459 if (!BARRIER_P (insn
))
460 BLOCK_FOR_INSN (insn
) = NULL
;
466 const pass_data pass_data_free_cfg
=
469 "*free_cfg", /* name */
470 OPTGROUP_NONE
, /* optinfo_flags */
472 0, /* properties_required */
473 0, /* properties_provided */
474 PROP_cfg
, /* properties_destroyed */
475 0, /* todo_flags_start */
476 0, /* todo_flags_finish */
479 class pass_free_cfg
: public rtl_opt_pass
482 pass_free_cfg (gcc::context
*ctxt
)
483 : rtl_opt_pass (pass_data_free_cfg
, ctxt
)
486 /* opt_pass methods: */
487 virtual unsigned int execute (function
*);
489 }; // class pass_free_cfg
492 pass_free_cfg::execute (function
*)
495 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
496 valid at that point so it would be too late to call df_analyze. */
497 if (optimize
> 0 && flag_delayed_branch
)
499 df_note_add_problem ();
504 if (crtl
->has_bb_partition
)
505 insert_section_boundary_note ();
514 make_pass_free_cfg (gcc::context
*ctxt
)
516 return new pass_free_cfg (ctxt
);
519 /* Return RTX to emit after when we want to emit code on the entry of function. */
521 entry_of_function (void)
523 return (n_basic_blocks_for_fn (cfun
) > NUM_FIXED_BLOCKS
?
524 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
) : get_insns ());
527 /* Emit INSN at the entry point of the function, ensuring that it is only
528 executed once per function. */
530 emit_insn_at_entry (rtx insn
)
532 edge_iterator ei
= ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
533 edge e
= ei_safe_edge (ei
);
534 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
536 insert_insn_on_edge (insn
, e
);
537 commit_edge_insertions ();
540 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
541 (or BARRIER if found) and notify df of the bb change.
542 The insn chain range is inclusive
543 (i.e. both BEGIN and END will be updated. */
546 update_bb_for_insn_chain (rtx_insn
*begin
, rtx_insn
*end
, basic_block bb
)
550 end
= NEXT_INSN (end
);
551 for (insn
= begin
; insn
!= end
; insn
= NEXT_INSN (insn
))
552 if (!BARRIER_P (insn
))
553 df_insn_change_bb (insn
, bb
);
556 /* Update BLOCK_FOR_INSN of insns in BB to BB,
557 and notify df of the change. */
560 update_bb_for_insn (basic_block bb
)
562 update_bb_for_insn_chain (BB_HEAD (bb
), BB_END (bb
), bb
);
566 /* Like active_insn_p, except keep the return value clobber around
567 even after reload. */
570 flow_active_insn_p (const rtx_insn
*insn
)
572 if (active_insn_p (insn
))
575 /* A clobber of the function return value exists for buggy
576 programs that fail to return a value. Its effect is to
577 keep the return value from being live across the entire
578 function. If we allow it to be skipped, we introduce the
579 possibility for register lifetime confusion. */
580 if (GET_CODE (PATTERN (insn
)) == CLOBBER
581 && REG_P (XEXP (PATTERN (insn
), 0))
582 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))
588 /* Return true if the block has no effect and only forwards control flow to
589 its single destination. */
592 contains_no_active_insn_p (const_basic_block bb
)
596 if (bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
) || bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
597 || !single_succ_p (bb
))
600 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= NEXT_INSN (insn
))
601 if (INSN_P (insn
) && flow_active_insn_p (insn
))
604 return (!INSN_P (insn
)
605 || (JUMP_P (insn
) && simplejump_p (insn
))
606 || !flow_active_insn_p (insn
));
609 /* Likewise, but protect loop latches, headers and preheaders. */
610 /* FIXME: Make this a cfg hook. */
613 forwarder_block_p (const_basic_block bb
)
615 if (!contains_no_active_insn_p (bb
))
618 /* Protect loop latches, headers and preheaders. */
622 if (bb
->loop_father
->header
== bb
)
624 dest
= EDGE_SUCC (bb
, 0)->dest
;
625 if (dest
->loop_father
->header
== dest
)
632 /* Return nonzero if we can reach target from src by falling through. */
633 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
636 can_fallthru (basic_block src
, basic_block target
)
638 rtx_insn
*insn
= BB_END (src
);
643 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
645 if (src
->next_bb
!= target
)
648 /* ??? Later we may add code to move jump tables offline. */
649 if (tablejump_p (insn
, NULL
, NULL
))
652 FOR_EACH_EDGE (e
, ei
, src
->succs
)
653 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
654 && e
->flags
& EDGE_FALLTHRU
)
657 insn2
= BB_HEAD (target
);
658 if (!active_insn_p (insn2
))
659 insn2
= next_active_insn (insn2
);
661 return next_active_insn (insn
) == insn2
;
664 /* Return nonzero if we could reach target from src by falling through,
665 if the target was made adjacent. If we already have a fall-through
666 edge to the exit block, we can't do that. */
668 could_fall_through (basic_block src
, basic_block target
)
673 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
675 FOR_EACH_EDGE (e
, ei
, src
->succs
)
676 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
677 && e
->flags
& EDGE_FALLTHRU
)
682 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
684 bb_note (basic_block bb
)
690 note
= NEXT_INSN (note
);
692 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note
));
693 return as_a
<rtx_note
*> (note
);
696 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
697 note associated with the BLOCK. */
700 first_insn_after_basic_block_note (basic_block block
)
704 /* Get the first instruction in the block. */
705 insn
= BB_HEAD (block
);
707 if (insn
== NULL_RTX
)
710 insn
= NEXT_INSN (insn
);
711 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
713 return NEXT_INSN (insn
);
716 /* Creates a new basic block just after basic block BB by splitting
717 everything after specified instruction INSNP. */
720 rtl_split_block (basic_block bb
, void *insnp
)
723 rtx_insn
*insn
= (rtx_insn
*) insnp
;
729 insn
= first_insn_after_basic_block_note (bb
);
733 rtx_insn
*next
= insn
;
735 insn
= PREV_INSN (insn
);
737 /* If the block contains only debug insns, insn would have
738 been NULL in a non-debug compilation, and then we'd end
739 up emitting a DELETED note. For -fcompare-debug
740 stability, emit the note too. */
741 if (insn
!= BB_END (bb
)
742 && DEBUG_INSN_P (next
)
743 && DEBUG_INSN_P (BB_END (bb
)))
745 while (next
!= BB_END (bb
) && DEBUG_INSN_P (next
))
746 next
= NEXT_INSN (next
);
748 if (next
== BB_END (bb
))
749 emit_note_after (NOTE_INSN_DELETED
, next
);
753 insn
= get_last_insn ();
756 /* We probably should check type of the insn so that we do not create
757 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
759 if (insn
== BB_END (bb
))
760 emit_note_after (NOTE_INSN_DELETED
, insn
);
762 /* Create the new basic block. */
763 new_bb
= create_basic_block (NEXT_INSN (insn
), BB_END (bb
), bb
);
764 BB_COPY_PARTITION (new_bb
, bb
);
767 /* Redirect the outgoing edges. */
768 new_bb
->succs
= bb
->succs
;
770 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
773 /* The new block starts off being dirty. */
774 df_set_bb_dirty (bb
);
778 /* Return true if the single edge between blocks A and B is the only place
779 in RTL which holds some unique locus. */
782 unique_locus_on_edge_between_p (basic_block a
, basic_block b
)
784 const location_t goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
785 rtx_insn
*insn
, *end
;
787 if (LOCATION_LOCUS (goto_locus
) == UNKNOWN_LOCATION
)
790 /* First scan block A backward. */
792 end
= PREV_INSN (BB_HEAD (a
));
793 while (insn
!= end
&& (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
794 insn
= PREV_INSN (insn
);
796 if (insn
!= end
&& INSN_LOCATION (insn
) == goto_locus
)
799 /* Then scan block B forward. */
803 end
= NEXT_INSN (BB_END (b
));
804 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
805 insn
= NEXT_INSN (insn
);
807 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
808 && INSN_LOCATION (insn
) == goto_locus
)
815 /* If the single edge between blocks A and B is the only place in RTL which
816 holds some unique locus, emit a nop with that locus between the blocks. */
819 emit_nop_for_unique_locus_between (basic_block a
, basic_block b
)
821 if (!unique_locus_on_edge_between_p (a
, b
))
824 BB_END (a
) = emit_insn_after_noloc (gen_nop (), BB_END (a
), a
);
825 INSN_LOCATION (BB_END (a
)) = EDGE_SUCC (a
, 0)->goto_locus
;
828 /* Blocks A and B are to be merged into a single block A. The insns
829 are already contiguous. */
832 rtl_merge_blocks (basic_block a
, basic_block b
)
834 rtx_insn
*b_head
= BB_HEAD (b
), *b_end
= BB_END (b
), *a_end
= BB_END (a
);
835 rtx_insn
*del_first
= NULL
, *del_last
= NULL
;
836 rtx_insn
*b_debug_start
= b_end
, *b_debug_end
= b_end
;
837 bool forwarder_p
= (b
->flags
& BB_FORWARDER_BLOCK
) != 0;
841 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
844 while (DEBUG_INSN_P (b_end
))
845 b_end
= PREV_INSN (b_debug_start
= b_end
);
847 /* If there was a CODE_LABEL beginning B, delete it. */
848 if (LABEL_P (b_head
))
850 /* Detect basic blocks with nothing but a label. This can happen
851 in particular at the end of a function. */
855 del_first
= del_last
= b_head
;
856 b_head
= NEXT_INSN (b_head
);
859 /* Delete the basic block note and handle blocks containing just that
861 if (NOTE_INSN_BASIC_BLOCK_P (b_head
))
869 b_head
= NEXT_INSN (b_head
);
872 /* If there was a jump out of A, delete it. */
877 for (prev
= PREV_INSN (a_end
); ; prev
= PREV_INSN (prev
))
879 || NOTE_INSN_BASIC_BLOCK_P (prev
)
880 || prev
== BB_HEAD (a
))
886 /* If this was a conditional jump, we need to also delete
887 the insn that set cc0. */
888 if (only_sets_cc0_p (prev
))
890 rtx_insn
*tmp
= prev
;
892 prev
= prev_nonnote_insn (prev
);
899 a_end
= PREV_INSN (del_first
);
901 else if (BARRIER_P (NEXT_INSN (a_end
)))
902 del_first
= NEXT_INSN (a_end
);
904 /* Delete everything marked above as well as crap that might be
905 hanging out between the two blocks. */
907 BB_HEAD (b
) = b_empty
? NULL
: b_head
;
908 delete_insn_chain (del_first
, del_last
, true);
910 /* When not optimizing and the edge is the only place in RTL which holds
911 some unique locus, emit a nop with that locus in between. */
914 emit_nop_for_unique_locus_between (a
, b
);
918 /* Reassociate the insns of B with A. */
921 update_bb_for_insn_chain (a_end
, b_debug_end
, a
);
923 BB_END (a
) = b_debug_end
;
926 else if (b_end
!= b_debug_end
)
928 /* Move any deleted labels and other notes between the end of A
929 and the debug insns that make up B after the debug insns,
930 bringing the debug insns into A while keeping the notes after
932 if (NEXT_INSN (a_end
) != b_debug_start
)
933 reorder_insns_nobb (NEXT_INSN (a_end
), PREV_INSN (b_debug_start
),
935 update_bb_for_insn_chain (b_debug_start
, b_debug_end
, a
);
936 BB_END (a
) = b_debug_end
;
939 df_bb_delete (b
->index
);
941 /* If B was a forwarder block, propagate the locus on the edge. */
943 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
)
944 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
947 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
951 /* Return true when block A and B can be merged. */
954 rtl_can_merge_blocks (basic_block a
, basic_block b
)
956 /* If we are partitioning hot/cold basic blocks, we don't want to
957 mess up unconditional or indirect jumps that cross between hot
960 Basic block partitioning may result in some jumps that appear to
961 be optimizable (or blocks that appear to be mergeable), but which really
962 must be left untouched (they are required to make it safely across
963 partition boundaries). See the comments at the top of
964 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
966 if (BB_PARTITION (a
) != BB_PARTITION (b
))
969 /* Protect the loop latches. */
970 if (current_loops
&& b
->loop_father
->latch
== b
)
973 /* There must be exactly one edge in between the blocks. */
974 return (single_succ_p (a
)
975 && single_succ (a
) == b
978 /* Must be simple edge. */
979 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
981 && a
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
982 && b
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
983 /* If the jump insn has side effects,
984 we can't kill the edge. */
985 && (!JUMP_P (BB_END (a
))
987 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
990 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
994 block_label (basic_block block
)
996 if (block
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
999 if (!LABEL_P (BB_HEAD (block
)))
1001 BB_HEAD (block
) = emit_label_before (gen_label_rtx (), BB_HEAD (block
));
1004 return BB_HEAD (block
);
1007 /* Attempt to perform edge redirection by replacing possibly complex jump
1008 instruction by unconditional jump or removing jump completely. This can
1009 apply only if all edges now point to the same block. The parameters and
1010 return values are equivalent to redirect_edge_and_branch. */
1013 try_redirect_by_replacing_jump (edge e
, basic_block target
, bool in_cfglayout
)
1015 basic_block src
= e
->src
;
1016 rtx_insn
*insn
= BB_END (src
), *kill_from
;
1020 /* If we are partitioning hot/cold basic blocks, we don't want to
1021 mess up unconditional or indirect jumps that cross between hot
1024 Basic block partitioning may result in some jumps that appear to
1025 be optimizable (or blocks that appear to be mergeable), but which really
1026 must be left untouched (they are required to make it safely across
1027 partition boundaries). See the comments at the top of
1028 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1030 if (BB_PARTITION (src
) != BB_PARTITION (target
))
1033 /* We can replace or remove a complex jump only when we have exactly
1034 two edges. Also, if we have exactly one outgoing edge, we can
1036 if (EDGE_COUNT (src
->succs
) >= 3
1037 /* Verify that all targets will be TARGET. Specifically, the
1038 edge that is not E must also go to TARGET. */
1039 || (EDGE_COUNT (src
->succs
) == 2
1040 && EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
))
1043 if (!onlyjump_p (insn
))
1045 if ((!optimize
|| reload_completed
) && tablejump_p (insn
, NULL
, NULL
))
1048 /* Avoid removing branch with side effects. */
1049 set
= single_set (insn
);
1050 if (!set
|| side_effects_p (set
))
1053 /* In case we zap a conditional jump, we'll need to kill
1054 the cc0 setter too. */
1057 if (reg_mentioned_p (cc0_rtx
, PATTERN (insn
))
1058 && only_sets_cc0_p (PREV_INSN (insn
)))
1059 kill_from
= PREV_INSN (insn
);
1062 /* See if we can create the fallthru edge. */
1063 if (in_cfglayout
|| can_fallthru (src
, target
))
1066 fprintf (dump_file
, "Removing jump %i.\n", INSN_UID (insn
));
1069 /* Selectively unlink whole insn chain. */
1072 rtx_insn
*insn
= BB_FOOTER (src
);
1074 delete_insn_chain (kill_from
, BB_END (src
), false);
1076 /* Remove barriers but keep jumptables. */
1079 if (BARRIER_P (insn
))
1081 if (PREV_INSN (insn
))
1082 SET_NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
1084 BB_FOOTER (src
) = NEXT_INSN (insn
);
1085 if (NEXT_INSN (insn
))
1086 SET_PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
1090 insn
= NEXT_INSN (insn
);
1094 delete_insn_chain (kill_from
, PREV_INSN (BB_HEAD (target
)),
1098 /* If this already is simplejump, redirect it. */
1099 else if (simplejump_p (insn
))
1101 if (e
->dest
== target
)
1104 fprintf (dump_file
, "Redirecting jump %i from %i to %i.\n",
1105 INSN_UID (insn
), e
->dest
->index
, target
->index
);
1106 if (!redirect_jump (insn
, block_label (target
), 0))
1108 gcc_assert (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
));
1113 /* Cannot do anything for target exit block. */
1114 else if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1117 /* Or replace possibly complicated jump insn by simple jump insn. */
1120 rtx target_label
= block_label (target
);
1123 rtx_jump_table_data
*table
;
1125 emit_jump_insn_after_noloc (gen_jump (target_label
), insn
);
1126 JUMP_LABEL (BB_END (src
)) = target_label
;
1127 LABEL_NUSES (target_label
)++;
1129 fprintf (dump_file
, "Replacing insn %i by jump %i\n",
1130 INSN_UID (insn
), INSN_UID (BB_END (src
)));
1133 delete_insn_chain (kill_from
, insn
, false);
1135 /* Recognize a tablejump that we are converting to a
1136 simple jump and remove its associated CODE_LABEL
1137 and ADDR_VEC or ADDR_DIFF_VEC. */
1138 if (tablejump_p (insn
, &label
, &table
))
1139 delete_insn_chain (label
, table
, false);
1141 barrier
= next_nonnote_insn (BB_END (src
));
1142 if (!barrier
|| !BARRIER_P (barrier
))
1143 emit_barrier_after (BB_END (src
));
1146 if (barrier
!= NEXT_INSN (BB_END (src
)))
1148 /* Move the jump before barrier so that the notes
1149 which originally were or were created before jump table are
1150 inside the basic block. */
1151 rtx_insn
*new_insn
= BB_END (src
);
1153 update_bb_for_insn_chain (NEXT_INSN (BB_END (src
)),
1154 PREV_INSN (barrier
), src
);
1156 SET_NEXT_INSN (PREV_INSN (new_insn
)) = NEXT_INSN (new_insn
);
1157 SET_PREV_INSN (NEXT_INSN (new_insn
)) = PREV_INSN (new_insn
);
1159 SET_NEXT_INSN (new_insn
) = barrier
;
1160 SET_NEXT_INSN (PREV_INSN (barrier
)) = new_insn
;
1162 SET_PREV_INSN (new_insn
) = PREV_INSN (barrier
);
1163 SET_PREV_INSN (barrier
) = new_insn
;
1168 /* Keep only one edge out and set proper flags. */
1169 if (!single_succ_p (src
))
1171 gcc_assert (single_succ_p (src
));
1173 e
= single_succ_edge (src
);
1175 e
->flags
= EDGE_FALLTHRU
;
1179 e
->probability
= REG_BR_PROB_BASE
;
1180 e
->count
= src
->count
;
1182 if (e
->dest
!= target
)
1183 redirect_edge_succ (e
, target
);
1187 /* Subroutine of redirect_branch_edge that tries to patch the jump
1188 instruction INSN so that it reaches block NEW. Do this
1189 only when it originally reached block OLD. Return true if this
1190 worked or the original target wasn't OLD, return false if redirection
1194 patch_jump_insn (rtx_insn
*insn
, rtx_insn
*old_label
, basic_block new_bb
)
1196 rtx_jump_table_data
*table
;
1198 /* Recognize a tablejump and adjust all matching cases. */
1199 if (tablejump_p (insn
, NULL
, &table
))
1203 rtx new_label
= block_label (new_bb
);
1205 if (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1207 vec
= table
->get_labels ();
1209 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1210 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1212 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (Pmode
, new_label
);
1213 --LABEL_NUSES (old_label
);
1214 ++LABEL_NUSES (new_label
);
1217 /* Handle casesi dispatch insns. */
1218 if ((tmp
= single_set (insn
)) != NULL
1219 && SET_DEST (tmp
) == pc_rtx
1220 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
1221 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
1222 && LABEL_REF_LABEL (XEXP (SET_SRC (tmp
), 2)) == old_label
)
1224 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (Pmode
,
1226 --LABEL_NUSES (old_label
);
1227 ++LABEL_NUSES (new_label
);
1230 else if ((tmp
= extract_asm_operands (PATTERN (insn
))) != NULL
)
1232 int i
, n
= ASM_OPERANDS_LABEL_LENGTH (tmp
);
1233 rtx new_label
, note
;
1235 if (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1237 new_label
= block_label (new_bb
);
1239 for (i
= 0; i
< n
; ++i
)
1241 rtx old_ref
= ASM_OPERANDS_LABEL (tmp
, i
);
1242 gcc_assert (GET_CODE (old_ref
) == LABEL_REF
);
1243 if (XEXP (old_ref
, 0) == old_label
)
1245 ASM_OPERANDS_LABEL (tmp
, i
)
1246 = gen_rtx_LABEL_REF (Pmode
, new_label
);
1247 --LABEL_NUSES (old_label
);
1248 ++LABEL_NUSES (new_label
);
1252 if (JUMP_LABEL (insn
) == old_label
)
1254 JUMP_LABEL (insn
) = new_label
;
1255 note
= find_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1257 remove_note (insn
, note
);
1261 note
= find_reg_note (insn
, REG_LABEL_TARGET
, old_label
);
1263 remove_note (insn
, note
);
1264 if (JUMP_LABEL (insn
) != new_label
1265 && !find_reg_note (insn
, REG_LABEL_TARGET
, new_label
))
1266 add_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1268 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, old_label
))
1270 XEXP (note
, 0) = new_label
;
1274 /* ?? We may play the games with moving the named labels from
1275 one basic block to the other in case only one computed_jump is
1277 if (computed_jump_p (insn
)
1278 /* A return instruction can't be redirected. */
1279 || returnjump_p (insn
))
1282 if (!currently_expanding_to_rtl
|| JUMP_LABEL (insn
) == old_label
)
1284 /* If the insn doesn't go where we think, we're confused. */
1285 gcc_assert (JUMP_LABEL (insn
) == old_label
);
1287 /* If the substitution doesn't succeed, die. This can happen
1288 if the back end emitted unrecognizable instructions or if
1289 target is exit block on some arches. */
1290 if (!redirect_jump (insn
, block_label (new_bb
), 0))
1292 gcc_assert (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
));
/* Redirect edge representing branch of (un)conditional jump or tablejump,
   NULL on failure.
   NOTE(review): extraction dropped some lines (braces, return statements);
   structure restored from context — verify against upstream cfgrtl.c.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;
  else if (!currently_expanding_to_rtl)
    {
      /* CFG is fully built: the branch is the single jump at the end of
	 SRC, so patch just that one insn.  */
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  /* Update the CFG edge itself to match the patched insn(s).  */
  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}
/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.
   NOTE(review): extraction dropped declarations/braces; restored from
   context — verify against upstream cfgrtl.c.  */
static void
fixup_partition_crossing (edge e)
{
  /* Edges to/from the entry and exit blocks never cross a hot/cold
     section boundary.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      /* Endpoints are now in different sections: mark the edge and the
	 source block's jump as region-crossing.  */
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
	 src if it exists, and if no other successors are
	 still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
	{
	  bool has_crossing_succ = false;
	  edge e2;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e2, ei, e->src->succs)
	    {
	      has_crossing_succ |= (e2->flags & EDGE_CROSSING);
	      if (has_crossing_succ)
		break;
	    }
	  /* Only clear the jump's flag once no successor crosses.  */
	  if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
	}
    }
}
/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.
   NOTE(review): extraction dropped declarations/braces; restored from
   context — verify against upstream cfgrtl.c.  */
static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold. Therefore,
     its preds will no longer be region crossing. Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region. Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
	 Note that force_nonfallthru will do any necessary partition
	 boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
	  && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	force_nonfallthru (e);
      else
	fixup_partition_crossing (e);
    }
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do that on
   expense of adding new instructions or reordering basic blocks.

   Function can be also called with edge destination equivalent to the TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return edge representing the branch if transformation succeeded.  Return NULL
   otherwise.
   We still return NULL in case E already destinated TARGET and we didn't
   managed to simplify instruction stream.
   NOTE(review): extraction dropped some lines (early returns, braces);
   restored from context — verify against upstream cfgrtl.c.  */
static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  /* Abnormal-call and EH edges cannot be retargeted by patching jumps.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  /* First try the cheap simplification: replace the jump entirely.  */
  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  /* Otherwise patch the existing branch insn.  */
  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.
   NOTE(review): extraction dropped the BB_FOOTER guard/else; restored from
   context — verify against upstream cfgrtl.c.  */
void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
	      || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      /* In layout mode the barrier must live in the block's footer chain,
	 not in the main insn stream.  */
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
	  rtx_insn *footer_tail = BB_FOOTER (bb);

	  while (NEXT_INSN (footer_tail))
	    footer_tail = NEXT_INSN (footer_tail);
	  /* Don't append a second barrier after an existing one.  */
	  if (!BARRIER_P (footer_tail))
	    {
	      SET_NEXT_INSN (footer_tail) = insn;
	      SET_PREV_INSN (insn) = footer_tail;
	    }
	}
      else
	BB_FOOTER (bb) = insn;
    }
}
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.
   NOTE(review): extraction dropped many lines (declarations, braces,
   #ifdef arms, clamp statements); restored from context — verify against
   upstream cfgrtl.c before relying on details.  */
basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  location_t loc;  /* presumably location_t; original decl lost — confirm.  */

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  /* Split the profile between the new branch edge B and the
	     remaining (former fallthru) edge E.  */
	  b->probability = prob;
	  /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  /* Clamp against rounding underflow.  */
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      /* Retarget asm-goto label refs that pointed at the old
		 fallthru destination, keeping use counts balanced.  */
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split profile evenly between the fallthru and asm-goto edges.  */
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */
static basic_block
rtl_force_nonfallthru (edge e)
{
  /* Thin wrapper: keep the destination, pass no return-jump label.  */
  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
}
/* Redirect edge even at the expense of creating new jump insn or
   basic block.  Return new basic block if created, NULL otherwise.
   Conversion must be possible.  */
static basic_block
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
  /* Cheap path first: if a plain redirection works (or E already
     reaches TARGET), no new block is needed.  */
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  df_set_bb_dirty (e->src);
  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
}
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.
   NOTE(review): extraction dropped several lines (INSN_P guard, onlyjump
   test, cc0 #ifdef, final q adjustment); restored from context — verify
   against upstream cfgrtl.c.  */
static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  rtx_insn *lab = safe_as_a <rtx_insn *> (label);
	  reorder_insns (lab, lab, PREV_INSN (q));
	  delete_insn (table);
	}

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
/* Should move basic block BB after basic block AFTER.  NIY.
   NOTE(review): function body was dropped by extraction; upstream simply
   reports "not implemented" by returning false — confirm.  */
static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  return false;
}
/* Locate the last bb in the same partition as START_BB.
   NOTE(review): loop-body return and final return were dropped by
   extraction; restored from context — verify against upstream.  */
static basic_block
last_bb_in_partition (basic_block start_bb)
{
  basic_block bb;
  /* Walk forward until the next chained block changes partition.  */
  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
	return bb;
    }
  /* Return bb before the exit block.  */
  return bb->prev_bb;
}
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.
   NOTE(review): extraction dropped declarations/else-branches/braces;
   restored from context — verify against upstream cfgrtl.c.  */
static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below). Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = NEXT_INSN (BB_END (after));
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot. The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.
   NOTE(review): extraction dropped the start_sequence/end_sequence lines;
   restored from context — verify against upstream.  */
void
insert_insn_on_edge (rtx pattern, edge e)
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));

  /* Start a fresh sequence, or append to insns already queued on E.  */
  if (e->insns.r == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns.r);

  emit_insn (pattern);

  e->insns.r = get_insns ();
  end_sequence ();
}
/* Update the CFG for the instructions queued on edge E.
   NOTE(review): extraction dropped many lines (insns/e->insns.r handoff,
   label guards, else-branches); restored from context — verify against
   upstream cfgrtl.c.  */
static void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the bb.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));
}
/* Update the CFG for all queued instructions.
   NOTE(review): extraction dropped declarations, #endif and the
   e->insns.r guard; restored from context — verify against upstream.  */
void
commit_edge_insertions (void)
{
  basic_block bb;

  /* Optimization passes that invoke this routine can cause hot blocks
     previously reached by both hot and cold blocks to become dominated only
     by cold blocks. This will cause the verification below to fail,
     and lead to now cold code in the hot section. In some cases this
     may only be visible after newly unreachable blocks are deleted,
     which will be done by fixup_partitions.  */
  fixup_partitions ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Include ENTRY (but not EXIT) so insns queued on entry edges commit.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  commit_one_edge_insertion (e);
    }
}
/* Print out RTL-specific basic block information (live information
   at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
   documented in dumpfile.h.
   NOTE(review): extraction dropped declarations/braces/else; restored
   from context — verify against upstream.  */
static void
rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
{
  rtx_insn *insn;
  rtx_insn *last;
  char *s_indent;

  /* Build an indentation prefix string (currently built but unused by the
     loop below — kept for parity with other dumpers).  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_top (bb, outf);
      putc ('\n', outf);
    }

  if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
    for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
	 insn = NEXT_INSN (insn))
      {
	if (flags & TDF_DETAILS)
	  df_dump_insn_top (insn, outf);
	if (! (flags & TDF_SLIM))
	  print_rtl_single (outf, insn);
	else
	  dump_insn_slim (outf, insn);
	if (flags & TDF_DETAILS)
	  df_dump_insn_bottom (insn, outf);
      }

  if (df && (flags & TDF_DETAILS))
    {
      df_dump_bottom (bb, outf);
      putc ('\n', outf);
    }
}
/* Like dump_function_to_file, but for RTL.  Print out dataflow information
   for the start of each basic block.  FLAGS are the TDF_* masks documented
   in dumpfile.h.
   NOTE(review): extraction dropped declarations, null checks, braces and
   the trailing frees; restored from context — verify against upstream.  */
void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-UID maps: block starting/ending at each insn, and membership.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT,
				true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT,
				false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
2277 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2280 update_br_prob_note (basic_block bb
)
2283 if (!JUMP_P (BB_END (bb
)))
2285 note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
);
2286 if (!note
|| XINT (note
, 0) == BRANCH_EDGE (bb
)->probability
)
2288 XINT (note
, 0) = BRANCH_EDGE (bb
)->probability
;
/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).
   NOTE(review): extraction dropped declarations, end=table assignment,
   loop body and final return; restored from context — verify against
   upstream.  */
rtx_insn *
get_last_bb_insn (basic_block bb)
{
  rtx_jump_table_data *table;
  rtx_insn *tmp;
  rtx_insn *end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &table))
    end = table;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn_bb (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn_bb (end);
    }

  return end;
}
/* Sanity check partition hotness to ensure that basic blocks in
   the cold partition don't dominate basic blocks in the hot partition.
   If FLAG_ONLY is true, report violations as errors.  Otherwise
   re-mark the dominated blocks as cold, since this is run after
   cfg optimizations that may make hot blocks previously reached
   by both hot and cold blocks now only reachable along cold paths.
   Returns the vector of blocks whose partition was changed (empty when
   FLAG_ONLY).
   NOTE(review): extraction dropped declarations, early return, flag_only
   branch and braces; restored from context — verify against upstream.  */
static vec<basic_block>
find_partition_fixes (bool flag_only)
{
  basic_block bb;
  vec<basic_block> bbs_in_cold_partition = vNULL;
  vec<basic_block> bbs_to_fix = vNULL;

  /* Callers check this.  */
  gcc_checking_assert (crtl->has_bb_partition);

  /* Seed the worklist with every block already in the cold section.  */
  FOR_EACH_BB_FN (bb, cfun)
    if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
      bbs_in_cold_partition.safe_push (bb);

  if (bbs_in_cold_partition.is_empty ())
    return vNULL;

  bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);

  if (dom_calculated_here)
    calculate_dominance_info (CDI_DOMINATORS);

  /* Propagate coldness: anything dominated by a cold block is cold.  */
  while (! bbs_in_cold_partition.is_empty ())
    {
      bb = bbs_in_cold_partition.pop ();
      /* Any blocks dominated by a block in the cold section
	 must also be cold.  */
      basic_block son;
      for (son = first_dom_son (CDI_DOMINATORS, bb);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	{
	  /* If son is not yet cold, then mark it cold here and
	     enqueue it for further processing.  */
	  if ((BB_PARTITION (son) != BB_COLD_PARTITION))
	    {
	      if (flag_only)
		error ("non-cold basic block %d dominated "
		       "by a block in the cold partition (%d)", son->index, bb->index);
	      else
		BB_SET_PARTITION (son, BB_COLD_PARTITION);
	      bbs_to_fix.safe_push (son);
	      bbs_in_cold_partition.safe_push (son);
	    }
	}
    }

  if (dom_calculated_here)
    free_dominance_info (CDI_DOMINATORS);

  return bbs_to_fix;
}
/* Perform cleanup on the hot/cold bb partitioning after optimization
   passes that modify the cfg.
   NOTE(review): extraction dropped the early return and braces; restored
   from context — verify against upstream.  */
void
fixup_partitions (void)
{
  basic_block bb;

  /* Nothing to do when the function was never partitioned.  */
  if (!crtl->has_bb_partition)
    return;

  /* Delete any blocks that became unreachable and weren't
     already cleaned up, for example during edge forwarding
     and convert_jumps_to_returns. This will expose more
     opportunities for fixing the partition boundaries here.
     Also, the calculation of the dominance graph during verification
     will assert if there are unreachable nodes.  */
  delete_unreachable_blocks ();

  /* If there are partitions, do a sanity check on them: A basic block in
     a cold partition cannot dominate a basic block in a hot partition.
     Fixup any that now violate this requirement, as a result of edge
     forwarding and unreachable block deletion.  */
  vec<basic_block> bbs_to_fix = find_partition_fixes (false);

  /* Do the partition fixup after all necessary blocks have been converted to
     cold, so that we only update the region crossings the minimum number of
     places, which can require forcing edges to be non fallthru.  */
  while (! bbs_to_fix.is_empty ())
    {
      bb = bbs_to_fix.pop ();
      fixup_new_cold_bb (bb);
    }
}
/* Verify, in the basic block chain, that there is at most one switch
   between hot/cold partitions.  This condition will not be true until
   after reorder_basic_blocks is called.  Returns nonzero error count.
   NOTE(review): extraction dropped declarations, err accounting and
   braces; restored from context — verify against upstream.  */
static int
verify_hot_cold_block_grouping (void)
{
  basic_block bb;
  int err = 0;
  bool switched_sections = false;
  int current_partition = BB_UNPARTITIONED;

  /* Even after bb reordering is complete, we go into cfglayout mode
     again (in compgoto). Ensure we don't call this before going back
     into linearized RTL when any layout fixes would have been committed.  */
  if (!crtl->bb_reorder_complete
      || current_ir_type () != IR_RTL_CFGRTL)
    return err;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (current_partition != BB_UNPARTITIONED
	  && BB_PARTITION (bb) != current_partition)
	{
	  /* A second transition means sections are interleaved — error.  */
	  if (switched_sections)
	    {
	      error ("multiple hot/cold transitions found (bb %i)",
		     bb->index);
	      err = 1;
	    }
	  else
	    switched_sections = true;

	  if (!crtl->has_bb_partition)
	    error ("partition found but function partition flag not set");
	}
      current_partition = BB_PARTITION (bb);
    }

  return err;
}
2453 /* Perform several checks on the edges out of each block, such as
2454 the consistency of the branch probabilities, the correctness
2455 of hot/cold partition crossing edges, and the number of expected
2456 successor edges. Also verify that the dominance relationship
2457 between hot/cold blocks is sane. */
2460 rtl_verify_edges (void)
2465 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2467 int n_fallthru
= 0, n_branch
= 0, n_abnormal_call
= 0, n_sibcall
= 0;
2468 int n_eh
= 0, n_abnormal
= 0;
2469 edge e
, fallthru
= NULL
;
2472 bool has_crossing_edge
= false;
2474 if (JUMP_P (BB_END (bb
))
2475 && (note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
))
2476 && EDGE_COUNT (bb
->succs
) >= 2
2477 && any_condjump_p (BB_END (bb
)))
2479 if (XINT (note
, 0) != BRANCH_EDGE (bb
)->probability
2480 && profile_status_for_fn (cfun
) != PROFILE_ABSENT
)
2482 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2483 XINT (note
, 0), BRANCH_EDGE (bb
)->probability
);
2488 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2492 if (e
->flags
& EDGE_FALLTHRU
)
2493 n_fallthru
++, fallthru
= e
;
2495 is_crossing
= (BB_PARTITION (e
->src
) != BB_PARTITION (e
->dest
)
2496 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
2497 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
));
2498 has_crossing_edge
|= is_crossing
;
2499 if (e
->flags
& EDGE_CROSSING
)
2503 error ("EDGE_CROSSING incorrectly set across same section");
2506 if (e
->flags
& EDGE_FALLTHRU
)
2508 error ("fallthru edge crosses section boundary in bb %i",
2512 if (e
->flags
& EDGE_EH
)
2514 error ("EH edge crosses section boundary in bb %i",
2518 if (JUMP_P (BB_END (bb
)) && !CROSSING_JUMP_P (BB_END (bb
)))
2520 error ("No region crossing jump at section boundary in bb %i",
2525 else if (is_crossing
)
2527 error ("EDGE_CROSSING missing across section boundary");
2531 if ((e
->flags
& ~(EDGE_DFS_BACK
2533 | EDGE_IRREDUCIBLE_LOOP
2536 | EDGE_PRESERVE
)) == 0)
2539 if (e
->flags
& EDGE_ABNORMAL_CALL
)
2542 if (e
->flags
& EDGE_SIBCALL
)
2545 if (e
->flags
& EDGE_EH
)
2548 if (e
->flags
& EDGE_ABNORMAL
)
2552 if (!has_crossing_edge
2553 && JUMP_P (BB_END (bb
))
2554 && CROSSING_JUMP_P (BB_END (bb
)))
2556 print_rtl_with_bb (stderr
, get_insns (), TDF_RTL
| TDF_BLOCKS
| TDF_DETAILS
);
2557 error ("Region crossing jump across same section in bb %i",
2562 if (n_eh
&& !find_reg_note (BB_END (bb
), REG_EH_REGION
, NULL_RTX
))
2564 error ("missing REG_EH_REGION note at the end of bb %i", bb
->index
);
2569 error ("too many exception handling edges in bb %i", bb
->index
);
2573 && (!JUMP_P (BB_END (bb
))
2574 || (n_branch
> 1 && (any_uncondjump_p (BB_END (bb
))
2575 || any_condjump_p (BB_END (bb
))))))
2577 error ("too many outgoing branch edges from bb %i", bb
->index
);
2580 if (n_fallthru
&& any_uncondjump_p (BB_END (bb
)))
2582 error ("fallthru edge after unconditional jump in bb %i", bb
->index
);
2585 if (n_branch
!= 1 && any_uncondjump_p (BB_END (bb
)))
2587 error ("wrong number of branch edges after unconditional jump"
2588 " in bb %i", bb
->index
);
2591 if (n_branch
!= 1 && any_condjump_p (BB_END (bb
))
2592 && JUMP_LABEL (BB_END (bb
)) != BB_HEAD (fallthru
->dest
))
2594 error ("wrong amount of branch edges after conditional jump"
2595 " in bb %i", bb
->index
);
2598 if (n_abnormal_call
&& !CALL_P (BB_END (bb
)))
2600 error ("abnormal call edges for non-call insn in bb %i", bb
->index
);
2603 if (n_sibcall
&& !CALL_P (BB_END (bb
)))
2605 error ("sibcall edges for non-call insn in bb %i", bb
->index
);
2608 if (n_abnormal
> n_eh
2609 && !(CALL_P (BB_END (bb
))
2610 && n_abnormal
== n_abnormal_call
+ n_sibcall
)
2611 && (!JUMP_P (BB_END (bb
))
2612 || any_condjump_p (BB_END (bb
))
2613 || any_uncondjump_p (BB_END (bb
))))
2615 error ("abnormal edges for no purpose in bb %i", bb
->index
);
2620 /* If there are partitions, do a sanity check on them: A basic block in
2621 Â a cold partition cannot dominate a basic block in a hot partition. Â */
2622 if (crtl
->has_bb_partition
&& !err
)
2624 vec
<basic_block
> bbs_to_fix
= find_partition_fixes (true);
2625 err
= !bbs_to_fix
.is_empty ();
2632 /* Checks on the instructions within blocks. Currently checks that each
2633 block starts with a basic block note, and that basic block notes and
2634 control flow jumps are not found in the middle of the block. */
2637 rtl_verify_bb_insns (void)
2643 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2645 /* Now check the header of basic
2646 block. It ought to contain optional CODE_LABEL followed
2647 by NOTE_BASIC_BLOCK. */
2651 if (BB_END (bb
) == x
)
2653 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2661 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
2663 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2668 if (BB_END (bb
) == x
)
2669 /* Do checks for empty blocks here. */
2672 for (x
= NEXT_INSN (x
); x
; x
= NEXT_INSN (x
))
2674 if (NOTE_INSN_BASIC_BLOCK_P (x
))
2676 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2677 INSN_UID (x
), bb
->index
);
2681 if (x
== BB_END (bb
))
2684 if (control_flow_insn_p (x
))
2686 error ("in basic block %d:", bb
->index
);
2687 fatal_insn ("flow control insn inside a basic block", x
);
2696 /* Verify that block pointers for instructions in basic blocks, headers and
2697 footers are set appropriately. */
2700 rtl_verify_bb_pointers (void)
2705 /* Check the general integrity of the basic blocks. */
2706 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2710 if (!(bb
->flags
& BB_RTL
))
2712 error ("BB_RTL flag not set for block %d", bb
->index
);
2716 FOR_BB_INSNS (bb
, insn
)
2717 if (BLOCK_FOR_INSN (insn
) != bb
)
2719 error ("insn %d basic block pointer is %d, should be %d",
2721 BLOCK_FOR_INSN (insn
) ? BLOCK_FOR_INSN (insn
)->index
: 0,
2726 for (insn
= BB_HEADER (bb
); insn
; insn
= NEXT_INSN (insn
))
2727 if (!BARRIER_P (insn
)
2728 && BLOCK_FOR_INSN (insn
) != NULL
)
2730 error ("insn %d in header of bb %d has non-NULL basic block",
2731 INSN_UID (insn
), bb
->index
);
2734 for (insn
= BB_FOOTER (bb
); insn
; insn
= NEXT_INSN (insn
))
2735 if (!BARRIER_P (insn
)
2736 && BLOCK_FOR_INSN (insn
) != NULL
)
2738 error ("insn %d in footer of bb %d has non-NULL basic block",
2739 INSN_UID (insn
), bb
->index
);
2748 /* Verify the CFG and RTL consistency common for both underlying RTL and
2751 Currently it does following checks:
2753 - overlapping of basic blocks
2754 - insns with wrong BLOCK_FOR_INSN pointers
2755 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2756 - tails of basic blocks (ensure that boundary is necessary)
2757 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2758 and NOTE_INSN_BASIC_BLOCK
2759 - verify that no fall_thru edge crosses hot/cold partition boundaries
2760 - verify that there are no pending RTL branch predictions
2761 - verify that hot blocks are not dominated by cold blocks
2763 In future it can be extended check a lot of other stuff as well
2764 (reachability of basic blocks, life information, etc. etc.). */
2767 rtl_verify_flow_info_1 (void)
2771 err
|= rtl_verify_bb_pointers ();
2773 err
|= rtl_verify_bb_insns ();
2775 err
|= rtl_verify_edges ();
2780 /* Walk the instruction chain and verify that bb head/end pointers
2781 are correct, and that instructions are in exactly one bb and have
2782 correct block pointers. */
2785 rtl_verify_bb_insn_chain (void)
2790 rtx_insn
*last_head
= get_last_insn ();
2791 basic_block
*bb_info
;
2792 const int max_uid
= get_max_uid ();
2794 bb_info
= XCNEWVEC (basic_block
, max_uid
);
2796 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2798 rtx_insn
*head
= BB_HEAD (bb
);
2799 rtx_insn
*end
= BB_END (bb
);
2801 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2803 /* Verify the end of the basic block is in the INSN chain. */
2807 /* And that the code outside of basic blocks has NULL bb field. */
2809 && BLOCK_FOR_INSN (x
) != NULL
)
2811 error ("insn %d outside of basic blocks has non-NULL bb field",
2819 error ("end insn %d for block %d not found in the insn stream",
2820 INSN_UID (end
), bb
->index
);
2824 /* Work backwards from the end to the head of the basic block
2825 to verify the head is in the RTL chain. */
2826 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2828 /* While walking over the insn chain, verify insns appear
2829 in only one basic block. */
2830 if (bb_info
[INSN_UID (x
)] != NULL
)
2832 error ("insn %d is in multiple basic blocks (%d and %d)",
2833 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
2837 bb_info
[INSN_UID (x
)] = bb
;
2844 error ("head insn %d for block %d not found in the insn stream",
2845 INSN_UID (head
), bb
->index
);
2849 last_head
= PREV_INSN (x
);
2852 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2854 /* Check that the code before the first basic block has NULL
2857 && BLOCK_FOR_INSN (x
) != NULL
)
2859 error ("insn %d outside of basic blocks has non-NULL bb field",
2869 /* Verify that fallthru edges point to adjacent blocks in layout order and
2870 that barriers exist after non-fallthru blocks. */
2873 rtl_verify_fallthru (void)
2878 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2882 e
= find_fallthru_edge (bb
->succs
);
2887 /* Ensure existence of barrier in BB with no fallthru edges. */
2888 for (insn
= NEXT_INSN (BB_END (bb
)); ; insn
= NEXT_INSN (insn
))
2890 if (!insn
|| NOTE_INSN_BASIC_BLOCK_P (insn
))
2892 error ("missing barrier after block %i", bb
->index
);
2896 if (BARRIER_P (insn
))
2900 else if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
2901 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2905 if (e
->src
->next_bb
!= e
->dest
)
2908 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2909 e
->src
->index
, e
->dest
->index
);
2913 for (insn
= NEXT_INSN (BB_END (e
->src
)); insn
!= BB_HEAD (e
->dest
);
2914 insn
= NEXT_INSN (insn
))
2915 if (BARRIER_P (insn
) || INSN_P (insn
))
2917 error ("verify_flow_info: Incorrect fallthru %i->%i",
2918 e
->src
->index
, e
->dest
->index
);
2919 fatal_insn ("wrong insn in the fallthru edge", insn
);
2928 /* Verify that blocks are laid out in consecutive order. While walking the
2929 instructions, verify that all expected instructions are inside the basic
2930 blocks, and that all returns are followed by barriers. */
2933 rtl_verify_bb_layout (void)
2939 rtx_insn
* const rtx_first
= get_insns ();
2940 basic_block last_bb_seen
= ENTRY_BLOCK_PTR_FOR_FN (cfun
), curr_bb
= NULL
;
2943 last_bb_seen
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
2945 for (x
= rtx_first
; x
; x
= NEXT_INSN (x
))
2947 if (NOTE_INSN_BASIC_BLOCK_P (x
))
2949 bb
= NOTE_BASIC_BLOCK (x
);
2952 if (bb
!= last_bb_seen
->next_bb
)
2953 internal_error ("basic blocks not laid down consecutively");
2955 curr_bb
= last_bb_seen
= bb
;
2960 switch (GET_CODE (x
))
2967 /* An ADDR_VEC is placed outside any basic block. */
2969 && JUMP_TABLE_DATA_P (NEXT_INSN (x
)))
2972 /* But in any case, non-deletable labels can appear anywhere. */
2976 fatal_insn ("insn outside basic block", x
);
2981 && returnjump_p (x
) && ! condjump_p (x
)
2982 && ! (next_nonnote_insn (x
) && BARRIER_P (next_nonnote_insn (x
))))
2983 fatal_insn ("return not followed by barrier", x
);
2985 if (curr_bb
&& x
== BB_END (curr_bb
))
2989 if (num_bb_notes
!= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
)
2991 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2992 num_bb_notes
, n_basic_blocks_for_fn (cfun
));
2997 /* Verify the CFG and RTL consistency common for both underlying RTL and
2998 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3000 Currently it does following checks:
3001 - all checks of rtl_verify_flow_info_1
3002 - test head/end pointers
3003 - check that blocks are laid out in consecutive order
3004 - check that all insns are in the basic blocks
3005 (except the switch handling code, barriers and notes)
3006 - check that all returns are followed by barriers
3007 - check that all fallthru edge points to the adjacent blocks
3008 - verify that there is a single hot/cold partition boundary after bbro */
3011 rtl_verify_flow_info (void)
3015 err
|= rtl_verify_flow_info_1 ();
3017 err
|= rtl_verify_bb_insn_chain ();
3019 err
|= rtl_verify_fallthru ();
3021 err
|= rtl_verify_bb_layout ();
3023 err
|= verify_hot_cold_block_grouping ();
3028 /* Assume that the preceding pass has possibly eliminated jump instructions
3029 or converted the unconditional jumps. Eliminate the edges from CFG.
3030 Return true if any edges are eliminated. */
3033 purge_dead_edges (basic_block bb
)
3036 rtx_insn
*insn
= BB_END (bb
);
3038 bool purged
= false;
3042 if (DEBUG_INSN_P (insn
) && insn
!= BB_HEAD (bb
))
3044 insn
= PREV_INSN (insn
);
3045 while ((DEBUG_INSN_P (insn
) || NOTE_P (insn
)) && insn
!= BB_HEAD (bb
));
3047 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3048 if (NONJUMP_INSN_P (insn
)
3049 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
)))
3053 if (! may_trap_p (PATTERN (insn
))
3054 || ((eqnote
= find_reg_equal_equiv_note (insn
))
3055 && ! may_trap_p (XEXP (eqnote
, 0))))
3056 remove_note (insn
, note
);
3059 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3060 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3062 bool remove
= false;
3064 /* There are three types of edges we need to handle correctly here: EH
3065 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3066 latter can appear when nonlocal gotos are used. */
3067 if (e
->flags
& EDGE_ABNORMAL_CALL
)
3071 else if (can_nonlocal_goto (insn
))
3073 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
3075 else if (flag_tm
&& find_reg_note (insn
, REG_TM
, NULL
))
3080 else if (e
->flags
& EDGE_EH
)
3081 remove
= !can_throw_internal (insn
);
3086 df_set_bb_dirty (bb
);
3099 /* We do care only about conditional jumps and simplejumps. */
3100 if (!any_condjump_p (insn
)
3101 && !returnjump_p (insn
)
3102 && !simplejump_p (insn
))
3105 /* Branch probability/prediction notes are defined only for
3106 condjumps. We've possibly turned condjump into simplejump. */
3107 if (simplejump_p (insn
))
3109 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
3111 remove_note (insn
, note
);
3112 while ((note
= find_reg_note (insn
, REG_BR_PRED
, NULL
)))
3113 remove_note (insn
, note
);
3116 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3118 /* Avoid abnormal flags to leak from computed jumps turned
3119 into simplejumps. */
3121 e
->flags
&= ~EDGE_ABNORMAL
;
3123 /* See if this edge is one we should keep. */
3124 if ((e
->flags
& EDGE_FALLTHRU
) && any_condjump_p (insn
))
3125 /* A conditional jump can fall through into the next
3126 block, so we should keep the edge. */
3131 else if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
3132 && BB_HEAD (e
->dest
) == JUMP_LABEL (insn
))
3133 /* If the destination block is the target of the jump,
3139 else if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
3140 && returnjump_p (insn
))
3141 /* If the destination block is the exit block, and this
3142 instruction is a return, then keep the edge. */
3147 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
3148 /* Keep the edges that correspond to exceptions thrown by
3149 this instruction and rematerialize the EDGE_ABNORMAL
3150 flag we just cleared above. */
3152 e
->flags
|= EDGE_ABNORMAL
;
3157 /* We do not need this edge. */
3158 df_set_bb_dirty (bb
);
3163 if (EDGE_COUNT (bb
->succs
) == 0 || !purged
)
3167 fprintf (dump_file
, "Purged edges from bb %i\n", bb
->index
);
3172 /* Redistribute probabilities. */
3173 if (single_succ_p (bb
))
3175 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
3176 single_succ_edge (bb
)->count
= bb
->count
;
3180 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
3184 b
= BRANCH_EDGE (bb
);
3185 f
= FALLTHRU_EDGE (bb
);
3186 b
->probability
= XINT (note
, 0);
3187 f
->probability
= REG_BR_PROB_BASE
- b
->probability
;
3188 /* Update these to use GCOV_COMPUTE_SCALE. */
3189 b
->count
= bb
->count
* b
->probability
/ REG_BR_PROB_BASE
;
3190 f
->count
= bb
->count
* f
->probability
/ REG_BR_PROB_BASE
;
3195 else if (CALL_P (insn
) && SIBLING_CALL_P (insn
))
3197 /* First, there should not be any EH or ABCALL edges resulting
3198 from non-local gotos and the like. If there were, we shouldn't
3199 have created the sibcall in the first place. Second, there
3200 should of course never have been a fallthru edge. */
3201 gcc_assert (single_succ_p (bb
));
3202 gcc_assert (single_succ_edge (bb
)->flags
3203 == (EDGE_SIBCALL
| EDGE_ABNORMAL
));
3208 /* If we don't see a jump insn, we don't know exactly why the block would
3209 have been broken at this point. Look for a simple, non-fallthru edge,
3210 as these are only created by conditional branches. If we find such an
3211 edge we know that there used to be a jump here and can then safely
3212 remove all non-fallthru edges. */
3214 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3215 if (! (e
->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
)))
3224 /* Remove all but the fake and fallthru edges. The fake edge may be
3225 the only successor for this block in the case of noreturn
3227 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3229 if (!(e
->flags
& (EDGE_FALLTHRU
| EDGE_FAKE
)))
3231 df_set_bb_dirty (bb
);
3239 gcc_assert (single_succ_p (bb
));
3241 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
3242 single_succ_edge (bb
)->count
= bb
->count
;
3245 fprintf (dump_file
, "Purged non-fallthru edges from bb %i\n",
3250 /* Search all basic blocks for potentially dead edges and purge them. Return
3251 true if some edge has been eliminated. */
3254 purge_all_dead_edges (void)
3259 FOR_EACH_BB_FN (bb
, cfun
)
3261 bool purged_here
= purge_dead_edges (bb
);
3263 purged
|= purged_here
;
3269 /* This is used by a few passes that emit some instructions after abnormal
3270 calls, moving the basic block's end, while they in fact do want to emit
3271 them on the fallthru edge. Look for abnormal call edges, find backward
3272 the call in the block and insert the instructions on the edge instead.
3274 Similarly, handle instructions throwing exceptions internally.
3276 Return true when instructions have been found and inserted on edges. */
3279 fixup_abnormal_edges (void)
3281 bool inserted
= false;
3284 FOR_EACH_BB_FN (bb
, cfun
)
3289 /* Look for cases we are interested in - calls or instructions causing
3291 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3292 if ((e
->flags
& EDGE_ABNORMAL_CALL
)
3293 || ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
3294 == (EDGE_ABNORMAL
| EDGE_EH
)))
3297 if (e
&& !CALL_P (BB_END (bb
)) && !can_throw_internal (BB_END (bb
)))
3301 /* Get past the new insns generated. Allow notes, as the insns
3302 may be already deleted. */
3304 while ((NONJUMP_INSN_P (insn
) || NOTE_P (insn
))
3305 && !can_throw_internal (insn
)
3306 && insn
!= BB_HEAD (bb
))
3307 insn
= PREV_INSN (insn
);
3309 if (CALL_P (insn
) || can_throw_internal (insn
))
3311 rtx_insn
*stop
, *next
;
3313 e
= find_fallthru_edge (bb
->succs
);
3315 stop
= NEXT_INSN (BB_END (bb
));
3318 for (insn
= NEXT_INSN (insn
); insn
!= stop
; insn
= next
)
3320 next
= NEXT_INSN (insn
);
3325 /* Sometimes there's still the return value USE.
3326 If it's placed after a trapping call (i.e. that
3327 call is the last insn anyway), we have no fallthru
3328 edge. Simply delete this use and don't try to insert
3329 on the non-existent edge. */
3330 if (GET_CODE (PATTERN (insn
)) != USE
)
3332 /* We're not deleting it, we're moving it. */
3333 insn
->set_undeleted ();
3334 SET_PREV_INSN (insn
) = NULL_RTX
;
3335 SET_NEXT_INSN (insn
) = NULL_RTX
;
3337 insert_insn_on_edge (insn
, e
);
3341 else if (!BARRIER_P (insn
))
3342 set_block_for_insn (insn
, NULL
);
3346 /* It may be that we don't find any trapping insn. In this
3347 case we discovered quite late that the insn that had been
3348 marked as can_throw_internal in fact couldn't trap at all.
3349 So we should in fact delete the EH edges out of the block. */
3351 purge_dead_edges (bb
);
3358 /* Cut the insns from FIRST to LAST out of the insns stream. */
3361 unlink_insn_chain (rtx_insn
*first
, rtx_insn
*last
)
3363 rtx_insn
*prevfirst
= PREV_INSN (first
);
3364 rtx_insn
*nextlast
= NEXT_INSN (last
);
3366 SET_PREV_INSN (first
) = NULL
;
3367 SET_NEXT_INSN (last
) = NULL
;
3369 SET_NEXT_INSN (prevfirst
) = nextlast
;
3371 SET_PREV_INSN (nextlast
) = prevfirst
;
3373 set_last_insn (prevfirst
);
3375 set_first_insn (nextlast
);
3379 /* Skip over inter-block insns occurring after BB which are typically
3380 associated with BB (e.g., barriers). If there are any such insns,
3381 we return the last one. Otherwise, we return the end of BB. */
3384 skip_insns_after_block (basic_block bb
)
3386 rtx_insn
*insn
, *last_insn
, *next_head
, *prev
;
3389 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3390 next_head
= BB_HEAD (bb
->next_bb
);
3392 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
3394 if (insn
== next_head
)
3397 switch (GET_CODE (insn
))
3404 switch (NOTE_KIND (insn
))
3406 case NOTE_INSN_BLOCK_END
:
3416 if (NEXT_INSN (insn
)
3417 && JUMP_TABLE_DATA_P (NEXT_INSN (insn
)))
3419 insn
= NEXT_INSN (insn
);
3432 /* It is possible to hit contradictory sequence. For instance:
3438 Where barrier belongs to jump_insn, but the note does not. This can be
3439 created by removing the basic block originally following
3440 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3442 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
3444 prev
= PREV_INSN (insn
);
3446 switch (NOTE_KIND (insn
))
3448 case NOTE_INSN_BLOCK_END
:
3451 case NOTE_INSN_DELETED
:
3452 case NOTE_INSN_DELETED_LABEL
:
3453 case NOTE_INSN_DELETED_DEBUG_LABEL
:
3456 reorder_insns (insn
, insn
, last_insn
);
3463 /* Locate or create a label for a given basic block. */
3466 label_for_bb (basic_block bb
)
3468 rtx label
= BB_HEAD (bb
);
3470 if (!LABEL_P (label
))
3473 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
3475 label
= block_label (bb
);
3481 /* Locate the effective beginning and end of the insn chain for each
3482 block, as defined by skip_insns_after_block above. */
3485 record_effective_endpoints (void)
3487 rtx_insn
*next_insn
;
3491 for (insn
= get_insns ();
3494 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
3495 insn
= NEXT_INSN (insn
))
3497 /* No basic blocks at all? */
3500 if (PREV_INSN (insn
))
3501 cfg_layout_function_header
=
3502 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
3504 cfg_layout_function_header
= NULL
;
3506 next_insn
= get_insns ();
3507 FOR_EACH_BB_FN (bb
, cfun
)
3511 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
3512 BB_HEADER (bb
) = unlink_insn_chain (next_insn
,
3513 PREV_INSN (BB_HEAD (bb
)));
3514 end
= skip_insns_after_block (bb
);
3515 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
3516 BB_FOOTER (bb
) = unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
3517 next_insn
= NEXT_INSN (BB_END (bb
));
3520 cfg_layout_function_footer
= next_insn
;
3521 if (cfg_layout_function_footer
)
3522 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
3527 const pass_data pass_data_into_cfg_layout_mode
=
3529 RTL_PASS
, /* type */
3530 "into_cfglayout", /* name */
3531 OPTGROUP_NONE
, /* optinfo_flags */
3533 0, /* properties_required */
3534 PROP_cfglayout
, /* properties_provided */
3535 0, /* properties_destroyed */
3536 0, /* todo_flags_start */
3537 0, /* todo_flags_finish */
3540 class pass_into_cfg_layout_mode
: public rtl_opt_pass
3543 pass_into_cfg_layout_mode (gcc::context
*ctxt
)
3544 : rtl_opt_pass (pass_data_into_cfg_layout_mode
, ctxt
)
3547 /* opt_pass methods: */
3548 virtual unsigned int execute (function
*)
3550 cfg_layout_initialize (0);
3554 }; // class pass_into_cfg_layout_mode
3559 make_pass_into_cfg_layout_mode (gcc::context
*ctxt
)
3561 return new pass_into_cfg_layout_mode (ctxt
);
3566 const pass_data pass_data_outof_cfg_layout_mode
=
3568 RTL_PASS
, /* type */
3569 "outof_cfglayout", /* name */
3570 OPTGROUP_NONE
, /* optinfo_flags */
3572 0, /* properties_required */
3573 0, /* properties_provided */
3574 PROP_cfglayout
, /* properties_destroyed */
3575 0, /* todo_flags_start */
3576 0, /* todo_flags_finish */
3579 class pass_outof_cfg_layout_mode
: public rtl_opt_pass
3582 pass_outof_cfg_layout_mode (gcc::context
*ctxt
)
3583 : rtl_opt_pass (pass_data_outof_cfg_layout_mode
, ctxt
)
3586 /* opt_pass methods: */
3587 virtual unsigned int execute (function
*);
3589 }; // class pass_outof_cfg_layout_mode
3592 pass_outof_cfg_layout_mode::execute (function
*fun
)
3596 FOR_EACH_BB_FN (bb
, fun
)
3597 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (fun
))
3598 bb
->aux
= bb
->next_bb
;
3600 cfg_layout_finalize ();
3608 make_pass_outof_cfg_layout_mode (gcc::context
*ctxt
)
3610 return new pass_outof_cfg_layout_mode (ctxt
);
3614 /* Link the basic blocks in the correct order, compacting the basic
3615 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3616 function also clears the basic block header and footer fields.
3618 This function is usually called after a pass (e.g. tracer) finishes
3619 some transformations while in cfglayout mode. The required sequence
3620 of the basic blocks is in a linked list along the bb->aux field.
3621 This functions re-links the basic block prev_bb and next_bb pointers
3622 accordingly, and it compacts and renumbers the blocks.
3624 FIXME: This currently works only for RTL, but the only RTL-specific
3625 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3626 to GIMPLE a long time ago, but it doesn't relink the basic block
3627 chain. It could do that (to give better initial RTL) if this function
3628 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3631 relink_block_chain (bool stay_in_cfglayout_mode
)
3633 basic_block bb
, prev_bb
;
3636 /* Maybe dump the re-ordered sequence. */
3639 fprintf (dump_file
, "Reordered sequence:\n");
3640 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
, index
=
3643 bb
= (basic_block
) bb
->aux
, index
++)
3645 fprintf (dump_file
, " %i ", index
);
3646 if (get_bb_original (bb
))
3647 fprintf (dump_file
, "duplicate of %i ",
3648 get_bb_original (bb
)->index
);
3649 else if (forwarder_block_p (bb
)
3650 && !LABEL_P (BB_HEAD (bb
)))
3651 fprintf (dump_file
, "compensation ");
3653 fprintf (dump_file
, "bb %i ", bb
->index
);
3654 fprintf (dump_file
, " [%i]\n", bb
->frequency
);
3658 /* Now reorder the blocks. */
3659 prev_bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
3660 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
3661 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
3663 bb
->prev_bb
= prev_bb
;
3664 prev_bb
->next_bb
= bb
;
3666 prev_bb
->next_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
);
3667 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
= prev_bb
;
3669 /* Then, clean up the aux fields. */
3670 FOR_ALL_BB_FN (bb
, cfun
)
3673 if (!stay_in_cfglayout_mode
)
3674 BB_HEADER (bb
) = BB_FOOTER (bb
) = NULL
;
3677 /* Maybe reset the original copy tables, they are not valid anymore
3678 when we renumber the basic blocks in compact_blocks. If we are
3679 are going out of cfglayout mode, don't re-allocate the tables. */
3680 free_original_copy_tables ();
3681 if (stay_in_cfglayout_mode
)
3682 initialize_original_copy_tables ();
3684 /* Finally, put basic_block_info in the new order. */
3689 /* Given a reorder chain, rearrange the code to match. */
3692 fixup_reorder_chain (void)
3695 rtx_insn
*insn
= NULL
;
3697 if (cfg_layout_function_header
)
3699 set_first_insn (cfg_layout_function_header
);
3700 insn
= cfg_layout_function_header
;
3701 while (NEXT_INSN (insn
))
3702 insn
= NEXT_INSN (insn
);
3705 /* First do the bulk reordering -- rechain the blocks without regard to
3706 the needed changes to jumps and labels. */
3708 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= (basic_block
)
3714 SET_NEXT_INSN (insn
) = BB_HEADER (bb
);
3716 set_first_insn (BB_HEADER (bb
));
3717 SET_PREV_INSN (BB_HEADER (bb
)) = insn
;
3718 insn
= BB_HEADER (bb
);
3719 while (NEXT_INSN (insn
))
3720 insn
= NEXT_INSN (insn
);
3723 SET_NEXT_INSN (insn
) = BB_HEAD (bb
);
3725 set_first_insn (BB_HEAD (bb
));
3726 SET_PREV_INSN (BB_HEAD (bb
)) = insn
;
3730 SET_NEXT_INSN (insn
) = BB_FOOTER (bb
);
3731 SET_PREV_INSN (BB_FOOTER (bb
)) = insn
;
3732 while (NEXT_INSN (insn
))
3733 insn
= NEXT_INSN (insn
);
3737 SET_NEXT_INSN (insn
) = cfg_layout_function_footer
;
3738 if (cfg_layout_function_footer
)
3739 SET_PREV_INSN (cfg_layout_function_footer
) = insn
;
3741 while (NEXT_INSN (insn
))
3742 insn
= NEXT_INSN (insn
);
3744 set_last_insn (insn
);
3745 #ifdef ENABLE_CHECKING
3746 verify_insn_chain ();
3749 /* Now add jumps and labels as needed to match the blocks new
3752 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= (basic_block
)
3755 edge e_fall
, e_taken
, e
;
3756 rtx_insn
*bb_end_insn
;
3757 rtx ret_label
= NULL_RTX
;
3761 if (EDGE_COUNT (bb
->succs
) == 0)
3764 /* Find the old fallthru edge, and another non-EH edge for
3766 e_taken
= e_fall
= NULL
;
3768 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3769 if (e
->flags
& EDGE_FALLTHRU
)
3771 else if (! (e
->flags
& EDGE_EH
))
3774 bb_end_insn
= BB_END (bb
);
3775 if (JUMP_P (bb_end_insn
))
3777 ret_label
= JUMP_LABEL (bb_end_insn
);
3778 if (any_condjump_p (bb_end_insn
))
3780 /* This might happen if the conditional jump has side
3781 effects and could therefore not be optimized away.
3782 Make the basic block to end with a barrier in order
3783 to prevent rtl_verify_flow_info from complaining. */
3786 gcc_assert (!onlyjump_p (bb_end_insn
)
3787 || returnjump_p (bb_end_insn
)
3788 || (e_taken
->flags
& EDGE_CROSSING
));
3789 emit_barrier_after (bb_end_insn
);
3793 /* If the old fallthru is still next, nothing to do. */
3794 if (bb
->aux
== e_fall
->dest
3795 || e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3798 /* The degenerated case of conditional jump jumping to the next
3799 instruction can happen for jumps with side effects. We need
3800 to construct a forwarder block and this will be done just
3801 fine by force_nonfallthru below. */
3805 /* There is another special case: if *neither* block is next,
3806 such as happens at the very end of a function, then we'll
3807 need to add a new unconditional jump. Choose the taken
3808 edge based on known or assumed probability. */
3809 else if (bb
->aux
!= e_taken
->dest
)
3811 rtx note
= find_reg_note (bb_end_insn
, REG_BR_PROB
, 0);
3814 && XINT (note
, 0) < REG_BR_PROB_BASE
/ 2
3815 && invert_jump (bb_end_insn
,
3817 == EXIT_BLOCK_PTR_FOR_FN (cfun
)
3819 : label_for_bb (e_fall
->dest
)), 0))
3821 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3822 gcc_checking_assert (could_fall_through
3823 (e_taken
->src
, e_taken
->dest
));
3824 e_taken
->flags
|= EDGE_FALLTHRU
;
3825 update_br_prob_note (bb
);
3826 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
3830 /* If the "jumping" edge is a crossing edge, and the fall
3831 through edge is non-crossing, leave things as they are. */
3832 else if ((e_taken
->flags
& EDGE_CROSSING
)
3833 && !(e_fall
->flags
& EDGE_CROSSING
))
3836 /* Otherwise we can try to invert the jump. This will
3837 basically never fail, however, keep up the pretense. */
3838 else if (invert_jump (bb_end_insn
,
3840 == EXIT_BLOCK_PTR_FOR_FN (cfun
)
3842 : label_for_bb (e_fall
->dest
)), 0))
3844 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3845 gcc_checking_assert (could_fall_through
3846 (e_taken
->src
, e_taken
->dest
));
3847 e_taken
->flags
|= EDGE_FALLTHRU
;
3848 update_br_prob_note (bb
);
3849 if (LABEL_NUSES (ret_label
) == 0
3850 && single_pred_p (e_taken
->dest
))
3851 delete_insn (ret_label
);
3855 else if (extract_asm_operands (PATTERN (bb_end_insn
)) != NULL
)
3857 /* If the old fallthru is still next or if
3858 asm goto doesn't have a fallthru (e.g. when followed by
3859 __builtin_unreachable ()), nothing to do. */
3861 || bb
->aux
== e_fall
->dest
3862 || e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3865 /* Otherwise we'll have to use the fallthru fixup below. */
3869 /* Otherwise we have some return, switch or computed
3870 jump. In the 99% case, there should not have been a
3872 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
3878 /* No fallthru implies a noreturn function with EH edges, or
3879 something similarly bizarre. In any case, we don't need to
3884 /* If the fallthru block is still next, nothing to do. */
3885 if (bb
->aux
== e_fall
->dest
)
3888 /* A fallthru to exit block. */
3889 if (e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3893 /* We got here if we need to add a new jump insn.
3894 Note force_nonfallthru can delete E_FALL and thus we have to
3895 save E_FALL->src prior to the call to force_nonfallthru. */
3896 nb
= force_nonfallthru_and_redirect (e_fall
, e_fall
->dest
, ret_label
);
3901 /* Don't process this new block. */
3906 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3908 /* Annoying special case - jump around dead jumptables left in the code. */
3909 FOR_EACH_BB_FN (bb
, cfun
)
3911 edge e
= find_fallthru_edge (bb
->succs
);
3913 if (e
&& !can_fallthru (e
->src
, e
->dest
))
3914 force_nonfallthru (e
);
3917 /* Ensure goto_locus from edges has some instructions with that locus
3920 FOR_EACH_BB_FN (bb
, cfun
)
3925 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3926 if (LOCATION_LOCUS (e
->goto_locus
) != UNKNOWN_LOCATION
3927 && !(e
->flags
& EDGE_ABNORMAL
))
3931 basic_block dest
, nb
;
3934 insn
= BB_END (e
->src
);
3935 end
= PREV_INSN (BB_HEAD (e
->src
));
3937 && (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
3938 insn
= PREV_INSN (insn
);
3940 && INSN_LOCATION (insn
) == e
->goto_locus
)
3942 if (simplejump_p (BB_END (e
->src
))
3943 && !INSN_HAS_LOCATION (BB_END (e
->src
)))
3945 INSN_LOCATION (BB_END (e
->src
)) = e
->goto_locus
;
3949 if (dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3951 /* Non-fallthru edges to the exit block cannot be split. */
3952 if (!(e
->flags
& EDGE_FALLTHRU
))
3957 insn
= BB_HEAD (dest
);
3958 end
= NEXT_INSN (BB_END (dest
));
3959 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
3960 insn
= NEXT_INSN (insn
);
3961 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
3962 && INSN_LOCATION (insn
) == e
->goto_locus
)
3965 nb
= split_edge (e
);
3966 if (!INSN_P (BB_END (nb
)))
3967 BB_END (nb
) = emit_insn_after_noloc (gen_nop (), BB_END (nb
),
3969 INSN_LOCATION (BB_END (nb
)) = e
->goto_locus
;
3971 /* If there are other incoming edges to the destination block
3972 with the same goto locus, redirect them to the new block as
3973 well, this can prevent other such blocks from being created
3974 in subsequent iterations of the loop. */
3975 for (ei2
= ei_start (dest
->preds
); (e2
= ei_safe_edge (ei2
)); )
3976 if (LOCATION_LOCUS (e2
->goto_locus
) != UNKNOWN_LOCATION
3977 && !(e2
->flags
& (EDGE_ABNORMAL
| EDGE_FALLTHRU
))
3978 && e
->goto_locus
== e2
->goto_locus
)
3979 redirect_edge_and_branch (e2
, nb
);
3986 /* Perform sanity checks on the insn chain.
3987 1. Check that next/prev pointers are consistent in both the forward and
3989 2. Count insns in chain, going both directions, and check if equal.
3990 3. Check that get_last_insn () returns the actual end of chain. */
3993 verify_insn_chain (void)
3995 rtx_insn
*x
, *prevx
, *nextx
;
3996 int insn_cnt1
, insn_cnt2
;
3998 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
4000 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
4001 gcc_assert (PREV_INSN (x
) == prevx
);
4003 gcc_assert (prevx
== get_last_insn ());
4005 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
4007 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
4008 gcc_assert (NEXT_INSN (x
) == nextx
);
4010 gcc_assert (insn_cnt1
== insn_cnt2
);
4013 /* If we have assembler epilogues, the block falling through to exit must
4014 be the last one in the reordered chain when we reach final. Ensure
4015 that this condition is met. */
4017 fixup_fallthru_exit_predecessor (void)
4020 basic_block bb
= NULL
;
4022 /* This transformation is not valid before reload, because we might
4023 separate a call from the instruction that copies the return
4025 gcc_assert (reload_completed
);
4027 e
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
4033 basic_block c
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
4035 /* If the very first block is the one with the fall-through exit
4036 edge, we have to split that block. */
4039 bb
= split_block (bb
, NULL
)->dest
;
4042 BB_FOOTER (bb
) = BB_FOOTER (c
);
4043 BB_FOOTER (c
) = NULL
;
4046 while (c
->aux
!= bb
)
4047 c
= (basic_block
) c
->aux
;
4051 c
= (basic_block
) c
->aux
;
4058 /* In case there are more than one fallthru predecessors of exit, force that
4059 there is only one. */
4062 force_one_exit_fallthru (void)
4064 edge e
, predecessor
= NULL
;
4067 basic_block forwarder
, bb
;
4069 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4070 if (e
->flags
& EDGE_FALLTHRU
)
4072 if (predecessor
== NULL
)
4084 /* Exit has several fallthru predecessors. Create a forwarder block for
4086 forwarder
= split_edge (predecessor
);
4087 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
4088 (e
= ei_safe_edge (ei
)); )
4090 if (e
->src
== forwarder
4091 || !(e
->flags
& EDGE_FALLTHRU
))
4094 redirect_edge_and_branch_force (e
, forwarder
);
4097 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4099 FOR_EACH_BB_FN (bb
, cfun
)
4101 if (bb
->aux
== NULL
&& bb
!= forwarder
)
4103 bb
->aux
= forwarder
;
4109 /* Return true in case it is possible to duplicate the basic block BB. */
4112 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
4114 /* Do not attempt to duplicate tablejumps, as we need to unshare
4115 the dispatch table. This is difficult to do, as the instructions
4116 computing jump destination may be hoisted outside the basic block. */
4117 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
4120 /* Do not duplicate blocks containing insns that can't be copied. */
4121 if (targetm
.cannot_copy_insn_p
)
4123 rtx_insn
*insn
= BB_HEAD (bb
);
4126 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
4128 if (insn
== BB_END (bb
))
4130 insn
= NEXT_INSN (insn
);
4138 duplicate_insn_chain (rtx_insn
*from
, rtx_insn
*to
)
4140 rtx_insn
*insn
, *next
, *copy
;
4143 /* Avoid updating of boundaries of previous basic block. The
4144 note will get removed from insn stream in fixup. */
4145 last
= emit_note (NOTE_INSN_DELETED
);
4147 /* Create copy at the end of INSN chain. The chain will
4148 be reordered later. */
4149 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
4151 switch (GET_CODE (insn
))
4154 /* Don't duplicate label debug insns. */
4155 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn
)) == LABEL_DECL
)
4161 copy
= emit_copy_of_insn_after (insn
, get_last_insn ());
4162 if (JUMP_P (insn
) && JUMP_LABEL (insn
) != NULL_RTX
4163 && ANY_RETURN_P (JUMP_LABEL (insn
)))
4164 JUMP_LABEL (copy
) = JUMP_LABEL (insn
);
4165 maybe_copy_prologue_epilogue_insn (insn
, copy
);
4168 case JUMP_TABLE_DATA
:
4169 /* Avoid copying of dispatch tables. We never duplicate
4170 tablejumps, so this can hit only in case the table got
4171 moved far from original jump.
4172 Avoid copying following barrier as well if any
4173 (and debug insns in between). */
4174 for (next
= NEXT_INSN (insn
);
4175 next
!= NEXT_INSN (to
);
4176 next
= NEXT_INSN (next
))
4177 if (!DEBUG_INSN_P (next
))
4179 if (next
!= NEXT_INSN (to
) && BARRIER_P (next
))
4191 switch (NOTE_KIND (insn
))
4193 /* In case prologue is empty and function contain label
4194 in first BB, we may want to copy the block. */
4195 case NOTE_INSN_PROLOGUE_END
:
4197 case NOTE_INSN_DELETED
:
4198 case NOTE_INSN_DELETED_LABEL
:
4199 case NOTE_INSN_DELETED_DEBUG_LABEL
:
4200 /* No problem to strip these. */
4201 case NOTE_INSN_FUNCTION_BEG
:
4202 /* There is always just single entry to function. */
4203 case NOTE_INSN_BASIC_BLOCK
:
4204 /* We should only switch text sections once. */
4205 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4208 case NOTE_INSN_EPILOGUE_BEG
:
4209 emit_note_copy (as_a
<rtx_note
*> (insn
));
4213 /* All other notes should have already been eliminated. */
4221 insn
= NEXT_INSN (last
);
4226 /* Create a duplicate of the basic block BB. */
4229 cfg_layout_duplicate_bb (basic_block bb
)
4234 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
));
4235 new_bb
= create_basic_block (insn
,
4236 insn
? get_last_insn () : NULL
,
4237 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
4239 BB_COPY_PARTITION (new_bb
, bb
);
4242 insn
= BB_HEADER (bb
);
4243 while (NEXT_INSN (insn
))
4244 insn
= NEXT_INSN (insn
);
4245 insn
= duplicate_insn_chain (BB_HEADER (bb
), insn
);
4247 BB_HEADER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
4252 insn
= BB_FOOTER (bb
);
4253 while (NEXT_INSN (insn
))
4254 insn
= NEXT_INSN (insn
);
4255 insn
= duplicate_insn_chain (BB_FOOTER (bb
), insn
);
4257 BB_FOOTER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
4264 /* Main entry point to this module - initialize the datastructures for
4265 CFG layout changes. It keeps LOOPS up-to-date if not null.
4267 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4270 cfg_layout_initialize (unsigned int flags
)
4275 /* Once bb partitioning is complete, cfg layout mode should not be
4276 re-entered. Entering cfg layout mode may require fixups. As an
4277 example, if edge forwarding performed when optimizing the cfg
4278 layout required moving a block from the hot to the cold
4279 section. This would create an illegal partitioning unless some
4280 manual fixup was performed. */
4281 gcc_assert (!(crtl
->bb_reorder_complete
4282 && flag_reorder_blocks_and_partition
));
4284 initialize_original_copy_tables ();
4286 cfg_layout_rtl_register_cfg_hooks ();
4288 record_effective_endpoints ();
4290 /* Make sure that the targets of non local gotos are marked. */
4291 for (x
= nonlocal_goto_handler_labels
; x
; x
= x
->next ())
4293 bb
= BLOCK_FOR_INSN (x
->insn ());
4294 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
4297 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
4300 /* Splits superblocks. */
4302 break_superblocks (void)
4304 sbitmap superblocks
;
4308 superblocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
4309 bitmap_clear (superblocks
);
4311 FOR_EACH_BB_FN (bb
, cfun
)
4312 if (bb
->flags
& BB_SUPERBLOCK
)
4314 bb
->flags
&= ~BB_SUPERBLOCK
;
4315 bitmap_set_bit (superblocks
, bb
->index
);
4321 rebuild_jump_labels (get_insns ());
4322 find_many_sub_basic_blocks (superblocks
);
4328 /* Finalize the changes: reorder insn list according to the sequence specified
4329 by aux pointers, enter compensation code, rebuild scope forest. */
4332 cfg_layout_finalize (void)
4334 #ifdef ENABLE_CHECKING
4335 verify_flow_info ();
4337 force_one_exit_fallthru ();
4338 rtl_register_cfg_hooks ();
4339 if (reload_completed
4340 #ifdef HAVE_epilogue
4344 fixup_fallthru_exit_predecessor ();
4345 fixup_reorder_chain ();
4347 rebuild_jump_labels (get_insns ());
4348 delete_dead_jumptables ();
4350 #ifdef ENABLE_CHECKING
4351 verify_insn_chain ();
4352 verify_flow_info ();
4357 /* Same as split_block but update cfg_layout structures. */
4360 cfg_layout_split_block (basic_block bb
, void *insnp
)
4362 rtx insn
= (rtx
) insnp
;
4363 basic_block new_bb
= rtl_split_block (bb
, insn
);
4365 BB_FOOTER (new_bb
) = BB_FOOTER (bb
);
4366 BB_FOOTER (bb
) = NULL
;
4371 /* Redirect Edge to DEST. */
4373 cfg_layout_redirect_edge_and_branch (edge e
, basic_block dest
)
4375 basic_block src
= e
->src
;
4378 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4381 if (e
->dest
== dest
)
4384 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4385 && (ret
= try_redirect_by_replacing_jump (e
, dest
, true)))
4387 df_set_bb_dirty (src
);
4391 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4392 && (e
->flags
& EDGE_FALLTHRU
) && !(e
->flags
& EDGE_COMPLEX
))
4395 fprintf (dump_file
, "Redirecting entry edge from bb %i to %i\n",
4396 e
->src
->index
, dest
->index
);
4398 df_set_bb_dirty (e
->src
);
4399 redirect_edge_succ (e
, dest
);
4403 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4404 in the case the basic block appears to be in sequence. Avoid this
4407 if (e
->flags
& EDGE_FALLTHRU
)
4409 /* Redirect any branch edges unified with the fallthru one. */
4410 if (JUMP_P (BB_END (src
))
4411 && label_is_jump_target_p (BB_HEAD (e
->dest
),
4417 fprintf (dump_file
, "Fallthru edge unified with branch "
4418 "%i->%i redirected to %i\n",
4419 e
->src
->index
, e
->dest
->index
, dest
->index
);
4420 e
->flags
&= ~EDGE_FALLTHRU
;
4421 redirected
= redirect_branch_edge (e
, dest
);
4422 gcc_assert (redirected
);
4423 redirected
->flags
|= EDGE_FALLTHRU
;
4424 df_set_bb_dirty (redirected
->src
);
4427 /* In case we are redirecting fallthru edge to the branch edge
4428 of conditional jump, remove it. */
4429 if (EDGE_COUNT (src
->succs
) == 2)
4431 /* Find the edge that is different from E. */
4432 edge s
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
);
4435 && any_condjump_p (BB_END (src
))
4436 && onlyjump_p (BB_END (src
)))
4437 delete_insn (BB_END (src
));
4440 fprintf (dump_file
, "Redirecting fallthru edge %i->%i to %i\n",
4441 e
->src
->index
, e
->dest
->index
, dest
->index
);
4442 ret
= redirect_edge_succ_nodup (e
, dest
);
4445 ret
= redirect_branch_edge (e
, dest
);
4447 /* We don't want simplejumps in the insn stream during cfglayout. */
4448 gcc_assert (!simplejump_p (BB_END (src
)));
4450 df_set_bb_dirty (src
);
4454 /* Simple wrapper as we always can redirect fallthru edges. */
4456 cfg_layout_redirect_edge_and_branch_force (edge e
, basic_block dest
)
4458 edge redirected
= cfg_layout_redirect_edge_and_branch (e
, dest
);
4460 gcc_assert (redirected
);
4464 /* Same as delete_basic_block but update cfg_layout structures. */
4467 cfg_layout_delete_block (basic_block bb
)
4469 rtx_insn
*insn
, *next
, *prev
= PREV_INSN (BB_HEAD (bb
)), *remaints
;
4474 next
= BB_HEAD (bb
);
4476 SET_NEXT_INSN (prev
) = BB_HEADER (bb
);
4478 set_first_insn (BB_HEADER (bb
));
4479 SET_PREV_INSN (BB_HEADER (bb
)) = prev
;
4480 insn
= BB_HEADER (bb
);
4481 while (NEXT_INSN (insn
))
4482 insn
= NEXT_INSN (insn
);
4483 SET_NEXT_INSN (insn
) = next
;
4484 SET_PREV_INSN (next
) = insn
;
4486 next
= NEXT_INSN (BB_END (bb
));
4489 insn
= BB_FOOTER (bb
);
4492 if (BARRIER_P (insn
))
4494 if (PREV_INSN (insn
))
4495 SET_NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
4497 BB_FOOTER (bb
) = NEXT_INSN (insn
);
4498 if (NEXT_INSN (insn
))
4499 SET_PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
4503 insn
= NEXT_INSN (insn
);
4508 SET_NEXT_INSN (insn
) = BB_FOOTER (bb
);
4509 SET_PREV_INSN (BB_FOOTER (bb
)) = insn
;
4510 while (NEXT_INSN (insn
))
4511 insn
= NEXT_INSN (insn
);
4512 SET_NEXT_INSN (insn
) = next
;
4514 SET_PREV_INSN (next
) = insn
;
4516 set_last_insn (insn
);
4519 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
4520 to
= &BB_HEADER (bb
->next_bb
);
4522 to
= &cfg_layout_function_footer
;
4524 rtl_delete_block (bb
);
4527 prev
= NEXT_INSN (prev
);
4529 prev
= get_insns ();
4531 next
= PREV_INSN (next
);
4533 next
= get_last_insn ();
4535 if (next
&& NEXT_INSN (next
) != prev
)
4537 remaints
= unlink_insn_chain (prev
, next
);
4539 while (NEXT_INSN (insn
))
4540 insn
= NEXT_INSN (insn
);
4541 SET_NEXT_INSN (insn
) = *to
;
4543 SET_PREV_INSN (*to
) = insn
;
4548 /* Return true when blocks A and B can be safely merged. */
4551 cfg_layout_can_merge_blocks_p (basic_block a
, basic_block b
)
4553 /* If we are partitioning hot/cold basic blocks, we don't want to
4554 mess up unconditional or indirect jumps that cross between hot
4557 Basic block partitioning may result in some jumps that appear to
4558 be optimizable (or blocks that appear to be mergeable), but which really
4559 must be left untouched (they are required to make it safely across
4560 partition boundaries). See the comments at the top of
4561 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4563 if (BB_PARTITION (a
) != BB_PARTITION (b
))
4566 /* Protect the loop latches. */
4567 if (current_loops
&& b
->loop_father
->latch
== b
)
4570 /* If we would end up moving B's instructions, make sure it doesn't fall
4571 through into the exit block, since we cannot recover from a fallthrough
4572 edge into the exit block occurring in the middle of a function. */
4573 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4575 edge e
= find_fallthru_edge (b
->succs
);
4576 if (e
&& e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4580 /* There must be exactly one edge in between the blocks. */
4581 return (single_succ_p (a
)
4582 && single_succ (a
) == b
4583 && single_pred_p (b
) == 1
4585 /* Must be simple edge. */
4586 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
4587 && a
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4588 && b
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
4589 /* If the jump insn has side effects, we can't kill the edge.
4590 When not optimizing, try_redirect_by_replacing_jump will
4591 not allow us to redirect an edge by replacing a table jump. */
4592 && (!JUMP_P (BB_END (a
))
4593 || ((!optimize
|| reload_completed
)
4594 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
4597 /* Merge block A and B. The blocks must be mergeable. */
4600 cfg_layout_merge_blocks (basic_block a
, basic_block b
)
4602 bool forwarder_p
= (b
->flags
& BB_FORWARDER_BLOCK
) != 0;
4605 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a
, b
));
4608 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
4611 /* If there was a CODE_LABEL beginning B, delete it. */
4612 if (LABEL_P (BB_HEAD (b
)))
4614 delete_insn (BB_HEAD (b
));
4617 /* We should have fallthru edge in a, or we can do dummy redirection to get
4619 if (JUMP_P (BB_END (a
)))
4620 try_redirect_by_replacing_jump (EDGE_SUCC (a
, 0), b
, true);
4621 gcc_assert (!JUMP_P (BB_END (a
)));
4623 /* When not optimizing and the edge is the only place in RTL which holds
4624 some unique locus, emit a nop with that locus in between. */
4626 emit_nop_for_unique_locus_between (a
, b
);
4628 /* Move things from b->footer after a->footer. */
4632 BB_FOOTER (a
) = BB_FOOTER (b
);
4635 rtx_insn
*last
= BB_FOOTER (a
);
4637 while (NEXT_INSN (last
))
4638 last
= NEXT_INSN (last
);
4639 SET_NEXT_INSN (last
) = BB_FOOTER (b
);
4640 SET_PREV_INSN (BB_FOOTER (b
)) = last
;
4642 BB_FOOTER (b
) = NULL
;
4645 /* Move things from b->header before a->footer.
4646 Note that this may include dead tablejump data, but we don't clean
4647 those up until we go out of cfglayout mode. */
4650 if (! BB_FOOTER (a
))
4651 BB_FOOTER (a
) = BB_HEADER (b
);
4654 rtx_insn
*last
= BB_HEADER (b
);
4656 while (NEXT_INSN (last
))
4657 last
= NEXT_INSN (last
);
4658 SET_NEXT_INSN (last
) = BB_FOOTER (a
);
4659 SET_PREV_INSN (BB_FOOTER (a
)) = last
;
4660 BB_FOOTER (a
) = BB_HEADER (b
);
4662 BB_HEADER (b
) = NULL
;
4665 /* In the case basic blocks are not adjacent, move them around. */
4666 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4668 insn
= unlink_insn_chain (BB_HEAD (b
), BB_END (b
));
4670 emit_insn_after_noloc (insn
, BB_END (a
), a
);
4672 /* Otherwise just re-associate the instructions. */
4676 BB_END (a
) = BB_END (b
);
4679 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4680 We need to explicitly call. */
4681 update_bb_for_insn_chain (insn
, BB_END (b
), a
);
4683 /* Skip possible DELETED_LABEL insn. */
4684 if (!NOTE_INSN_BASIC_BLOCK_P (insn
))
4685 insn
= NEXT_INSN (insn
);
4686 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
4687 BB_HEAD (b
) = BB_END (b
) = NULL
;
4690 df_bb_delete (b
->index
);
4692 /* If B was a forwarder block, propagate the locus on the edge. */
4694 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
)
4695 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
4698 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
4704 cfg_layout_split_edge (edge e
)
4706 basic_block new_bb
=
4707 create_basic_block (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4708 ? NEXT_INSN (BB_END (e
->src
)) : get_insns (),
4711 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4712 BB_COPY_PARTITION (new_bb
, e
->src
);
4714 BB_COPY_PARTITION (new_bb
, e
->dest
);
4715 make_edge (new_bb
, e
->dest
, EDGE_FALLTHRU
);
4716 redirect_edge_and_branch_force (e
, new_bb
);
4721 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4724 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED
)
4728 /* Return true if BB contains only labels or non-executable
4732 rtl_block_empty_p (basic_block bb
)
4736 if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4737 || bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4740 FOR_BB_INSNS (bb
, insn
)
4741 if (NONDEBUG_INSN_P (insn
) && !any_uncondjump_p (insn
))
4747 /* Split a basic block if it ends with a conditional branch and if
4748 the other part of the block is not empty. */
4751 rtl_split_block_before_cond_jump (basic_block bb
)
4754 rtx_insn
*split_point
= NULL
;
4755 rtx_insn
*last
= NULL
;
4756 bool found_code
= false;
4758 FOR_BB_INSNS (bb
, insn
)
4760 if (any_condjump_p (insn
))
4762 else if (NONDEBUG_INSN_P (insn
))
4767 /* Did not find everything. */
4768 if (found_code
&& split_point
)
4769 return split_block (bb
, split_point
)->dest
;
4774 /* Return 1 if BB ends with a call, possibly followed by some
4775 instructions that must stay with the call, 0 otherwise. */
4778 rtl_block_ends_with_call_p (basic_block bb
)
4780 rtx_insn
*insn
= BB_END (bb
);
4782 while (!CALL_P (insn
)
4783 && insn
!= BB_HEAD (bb
)
4784 && (keep_with_call_p (insn
)
4786 || DEBUG_INSN_P (insn
)))
4787 insn
= PREV_INSN (insn
);
4788 return (CALL_P (insn
));
4791 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4794 rtl_block_ends_with_condjump_p (const_basic_block bb
)
4796 return any_condjump_p (BB_END (bb
));
4799 /* Return true if we need to add fake edge to exit.
4800 Helper function for rtl_flow_call_edges_add. */
4803 need_fake_edge_p (const rtx_insn
*insn
)
4809 && !SIBLING_CALL_P (insn
)
4810 && !find_reg_note (insn
, REG_NORETURN
, NULL
)
4811 && !(RTL_CONST_OR_PURE_CALL_P (insn
))))
4814 return ((GET_CODE (PATTERN (insn
)) == ASM_OPERANDS
4815 && MEM_VOLATILE_P (PATTERN (insn
)))
4816 || (GET_CODE (PATTERN (insn
)) == PARALLEL
4817 && asm_noperands (insn
) != -1
4818 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn
), 0, 0)))
4819 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
4822 /* Add fake edges to the function exit for any non constant and non noreturn
4823 calls, volatile inline assembly in the bitmap of blocks specified by
4824 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4827 The goal is to expose cases in which entering a basic block does not imply
4828 that all subsequent instructions must be executed. */
4831 rtl_flow_call_edges_add (sbitmap blocks
)
4834 int blocks_split
= 0;
4835 int last_bb
= last_basic_block_for_fn (cfun
);
4836 bool check_last_block
= false;
4838 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
4842 check_last_block
= true;
4844 check_last_block
= bitmap_bit_p (blocks
,
4845 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
4847 /* In the last basic block, before epilogue generation, there will be
4848 a fallthru edge to EXIT. Special care is required if the last insn
4849 of the last basic block is a call because make_edge folds duplicate
4850 edges, which would result in the fallthru edge also being marked
4851 fake, which would result in the fallthru edge being removed by
4852 remove_fake_edges, which would result in an invalid CFG.
4854 Moreover, we can't elide the outgoing fake edge, since the block
4855 profiler needs to take this into account in order to solve the minimal
4856 spanning tree in the case that the call doesn't return.
4858 Handle this by adding a dummy instruction in a new last basic block. */
4859 if (check_last_block
)
4861 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
4862 rtx_insn
*insn
= BB_END (bb
);
4864 /* Back up past insns that must be kept in the same block as a call. */
4865 while (insn
!= BB_HEAD (bb
)
4866 && keep_with_call_p (insn
))
4867 insn
= PREV_INSN (insn
);
4869 if (need_fake_edge_p (insn
))
4873 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
4876 insert_insn_on_edge (gen_use (const0_rtx
), e
);
4877 commit_edge_insertions ();
4882 /* Now add fake edges to the function exit for any non constant
4883 calls since there is no way that we can determine if they will
4886 for (i
= NUM_FIXED_BLOCKS
; i
< last_bb
; i
++)
4888 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
4890 rtx_insn
*prev_insn
;
4895 if (blocks
&& !bitmap_bit_p (blocks
, i
))
4898 for (insn
= BB_END (bb
); ; insn
= prev_insn
)
4900 prev_insn
= PREV_INSN (insn
);
4901 if (need_fake_edge_p (insn
))
4904 rtx_insn
*split_at_insn
= insn
;
4906 /* Don't split the block between a call and an insn that should
4907 remain in the same block as the call. */
4909 while (split_at_insn
!= BB_END (bb
)
4910 && keep_with_call_p (NEXT_INSN (split_at_insn
)))
4911 split_at_insn
= NEXT_INSN (split_at_insn
);
4913 /* The handling above of the final block before the epilogue
4914 should be enough to verify that there is no edge to the exit
4915 block in CFG already. Calling make_edge in such case would
4916 cause us to mark that edge as fake and remove it later. */
4918 #ifdef ENABLE_CHECKING
4919 if (split_at_insn
== BB_END (bb
))
4921 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
4922 gcc_assert (e
== NULL
);
4926 /* Note that the following may create a new basic block
4927 and renumber the existing basic blocks. */
4928 if (split_at_insn
!= BB_END (bb
))
4930 e
= split_block (bb
, split_at_insn
);
4935 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
4938 if (insn
== BB_HEAD (bb
))
4944 verify_flow_info ();
4946 return blocks_split
;
4949 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4950 the conditional branch target, SECOND_HEAD should be the fall-thru
4951 there is no need to handle this here the loop versioning code handles
4952 this. the reason for SECON_HEAD is that it is needed for condition
4953 in trees, and this should be of the same type since it is a hook. */
4955 rtl_lv_add_condition_to_bb (basic_block first_head
,
4956 basic_block second_head ATTRIBUTE_UNUSED
,
4957 basic_block cond_bb
, void *comp_rtx
)
4960 rtx_insn
*seq
, *jump
;
4961 rtx op0
= XEXP ((rtx
)comp_rtx
, 0);
4962 rtx op1
= XEXP ((rtx
)comp_rtx
, 1);
4963 enum rtx_code comp
= GET_CODE ((rtx
)comp_rtx
);
4967 label
= block_label (first_head
);
4968 mode
= GET_MODE (op0
);
4969 if (mode
== VOIDmode
)
4970 mode
= GET_MODE (op1
);
4973 op0
= force_operand (op0
, NULL_RTX
);
4974 op1
= force_operand (op1
, NULL_RTX
);
4975 do_compare_rtx_and_jump (op0
, op1
, comp
, 0,
4976 mode
, NULL_RTX
, NULL_RTX
, label
, -1);
4977 jump
= get_last_insn ();
4978 JUMP_LABEL (jump
) = label
;
4979 LABEL_NUSES (label
)++;
4983 /* Add the new cond, in the new head. */
4984 emit_insn_after (seq
, BB_END (cond_bb
));
4988 /* Given a block B with unconditional branch at its end, get the
4989 store the return the branch edge and the fall-thru edge in
4990 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4992 rtl_extract_cond_bb_edges (basic_block b
, edge
*branch_edge
,
4993 edge
*fallthru_edge
)
4995 edge e
= EDGE_SUCC (b
, 0);
4997 if (e
->flags
& EDGE_FALLTHRU
)
5000 *branch_edge
= EDGE_SUCC (b
, 1);
5005 *fallthru_edge
= EDGE_SUCC (b
, 1);
5010 init_rtl_bb_info (basic_block bb
)
5012 gcc_assert (!bb
->il
.x
.rtl
);
5013 bb
->il
.x
.head_
= NULL
;
5014 bb
->il
.x
.rtl
= ggc_cleared_alloc
<rtl_bb_info
> ();
5017 /* Returns true if it is possible to remove edge E by redirecting
5018 it to the destination of the other edge from E->src. */
5021 rtl_can_remove_branch_p (const_edge e
)
5023 const_basic_block src
= e
->src
;
5024 const_basic_block target
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
;
5025 const rtx_insn
*insn
= BB_END (src
);
5028 /* The conditions are taken from try_redirect_by_replacing_jump. */
5029 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
5032 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
5035 if (BB_PARTITION (src
) != BB_PARTITION (target
))
5038 if (!onlyjump_p (insn
)
5039 || tablejump_p (insn
, NULL
, NULL
))
5042 set
= single_set (insn
);
5043 if (!set
|| side_effects_p (set
))
5050 rtl_duplicate_bb (basic_block bb
)
5052 bb
= cfg_layout_duplicate_bb (bb
);
5057 /* Do book-keeping of basic block BB for the profile consistency checker.
5058 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5059 then do post-pass accounting. Store the counting in RECORD. */
5061 rtl_account_profile_record (basic_block bb
, int after_pass
,
5062 struct profile_record
*record
)
5065 FOR_BB_INSNS (bb
, insn
)
5068 record
->size
[after_pass
]
5069 += insn_rtx_cost (PATTERN (insn
), false);
5070 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
5071 record
->time
[after_pass
]
5072 += insn_rtx_cost (PATTERN (insn
), true) * bb
->count
;
5073 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
5074 record
->time
[after_pass
]
5075 += insn_rtx_cost (PATTERN (insn
), true) * bb
->frequency
;
5079 /* Implementation of CFG manipulation for linearized RTL. */
5080 struct cfg_hooks rtl_cfg_hooks
= {
5082 rtl_verify_flow_info
,
5084 rtl_dump_bb_for_graph
,
5085 rtl_create_basic_block
,
5086 rtl_redirect_edge_and_branch
,
5087 rtl_redirect_edge_and_branch_force
,
5088 rtl_can_remove_branch_p
,
5091 rtl_move_block_after
,
5092 rtl_can_merge_blocks
, /* can_merge_blocks_p */
5096 cfg_layout_can_duplicate_bb_p
,
5099 rtl_make_forwarder_block
,
5100 rtl_tidy_fallthru_edge
,
5101 rtl_force_nonfallthru
,
5102 rtl_block_ends_with_call_p
,
5103 rtl_block_ends_with_condjump_p
,
5104 rtl_flow_call_edges_add
,
5105 NULL
, /* execute_on_growing_pred */
5106 NULL
, /* execute_on_shrinking_pred */
5107 NULL
, /* duplicate loop for trees */
5108 NULL
, /* lv_add_condition_to_bb */
5109 NULL
, /* lv_adjust_loop_header_phi*/
5110 NULL
, /* extract_cond_bb_edges */
5111 NULL
, /* flush_pending_stmts */
5112 rtl_block_empty_p
, /* block_empty_p */
5113 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
5114 rtl_account_profile_record
,
5117 /* Implementation of CFG manipulation for cfg layout RTL, where
5118 basic block connected via fallthru edges does not have to be adjacent.
5119 This representation will hopefully become the default one in future
5120 version of the compiler. */
5122 struct cfg_hooks cfg_layout_rtl_cfg_hooks
= {
5124 rtl_verify_flow_info_1
,
5126 rtl_dump_bb_for_graph
,
5127 cfg_layout_create_basic_block
,
5128 cfg_layout_redirect_edge_and_branch
,
5129 cfg_layout_redirect_edge_and_branch_force
,
5130 rtl_can_remove_branch_p
,
5131 cfg_layout_delete_block
,
5132 cfg_layout_split_block
,
5133 rtl_move_block_after
,
5134 cfg_layout_can_merge_blocks_p
,
5135 cfg_layout_merge_blocks
,
5138 cfg_layout_can_duplicate_bb_p
,
5139 cfg_layout_duplicate_bb
,
5140 cfg_layout_split_edge
,
5141 rtl_make_forwarder_block
,
5142 NULL
, /* tidy_fallthru_edge */
5143 rtl_force_nonfallthru
,
5144 rtl_block_ends_with_call_p
,
5145 rtl_block_ends_with_condjump_p
,
5146 rtl_flow_call_edges_add
,
5147 NULL
, /* execute_on_growing_pred */
5148 NULL
, /* execute_on_shrinking_pred */
5149 duplicate_loop_to_header_edge
, /* duplicate loop for trees */
5150 rtl_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
5151 NULL
, /* lv_adjust_loop_header_phi*/
5152 rtl_extract_cond_bb_edges
, /* extract_cond_bb_edges */
5153 NULL
, /* flush_pending_stmts */
5154 rtl_block_empty_p
, /* block_empty_p */
5155 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
5156 rtl_account_profile_record
,
5159 #include "gt-cfgrtl.h"