/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
42 #include "coretypes.h"
49 #include "insn-config.h"
55 #include "cfgcleanup.h"
56 #include "bb-reorder.h"
57 #include "rtl-error.h"
58 #include "insn-attr.h"
62 #include "tree-pass.h"
63 #include "print-rtl.h"
67 /* Disable warnings about missing quoting in GCC diagnostics. */
69 # pragma GCC diagnostic push
70 # pragma GCC diagnostic ignored "-Wformat-diag"
73 /* Holds the interesting leading and trailing notes for the function.
74 Only applicable if the CFG is in cfglayout mode. */
75 static GTY(()) rtx_insn
*cfg_layout_function_footer
;
76 static GTY(()) rtx_insn
*cfg_layout_function_header
;
78 static rtx_insn
*skip_insns_after_block (basic_block
);
79 static void record_effective_endpoints (void);
80 static void fixup_reorder_chain (void);
82 void verify_insn_chain (void);
83 static void fixup_fallthru_exit_predecessor (void);
84 static int can_delete_note_p (const rtx_note
*);
85 static int can_delete_label_p (const rtx_code_label
*);
86 static basic_block
rtl_split_edge (edge
);
87 static bool rtl_move_block_after (basic_block
, basic_block
);
88 static int rtl_verify_flow_info (void);
89 static basic_block
cfg_layout_split_block (basic_block
, void *);
90 static edge
cfg_layout_redirect_edge_and_branch (edge
, basic_block
);
91 static basic_block
cfg_layout_redirect_edge_and_branch_force (edge
, basic_block
);
92 static void cfg_layout_delete_block (basic_block
);
93 static void rtl_delete_block (basic_block
);
94 static basic_block
rtl_redirect_edge_and_branch_force (edge
, basic_block
);
95 static edge
rtl_redirect_edge_and_branch (edge
, basic_block
);
96 static basic_block
rtl_split_block (basic_block
, void *);
97 static void rtl_dump_bb (FILE *, basic_block
, int, dump_flags_t
);
98 static int rtl_verify_flow_info_1 (void);
99 static void rtl_make_forwarder_block (edge
);
101 /* Return true if NOTE is not one of the ones that must be kept paired,
102 so that we may simply delete it. */
105 can_delete_note_p (const rtx_note
*note
)
107 switch (NOTE_KIND (note
))
109 case NOTE_INSN_DELETED
:
110 case NOTE_INSN_BASIC_BLOCK
:
111 case NOTE_INSN_EPILOGUE_BEG
:
119 /* True if a given label can be deleted. */
122 can_delete_label_p (const rtx_code_label
*label
)
124 return (!LABEL_PRESERVE_P (label
)
125 /* User declared labels must be preserved. */
126 && LABEL_NAME (label
) == 0
127 && !vec_safe_contains
<rtx_insn
*> (forced_labels
,
128 const_cast<rtx_code_label
*> (label
)));
131 /* Delete INSN by patching it out. */
134 delete_insn (rtx_insn
*insn
)
137 bool really_delete
= true;
141 /* Some labels can't be directly removed from the INSN chain, as they
142 might be references via variables, constant pool etc.
143 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
144 if (! can_delete_label_p (as_a
<rtx_code_label
*> (insn
)))
146 const char *name
= LABEL_NAME (insn
);
147 basic_block bb
= BLOCK_FOR_INSN (insn
);
148 rtx_insn
*bb_note
= NEXT_INSN (insn
);
150 really_delete
= false;
151 PUT_CODE (insn
, NOTE
);
152 NOTE_KIND (insn
) = NOTE_INSN_DELETED_LABEL
;
153 NOTE_DELETED_LABEL_NAME (insn
) = name
;
155 /* If the note following the label starts a basic block, and the
156 label is a member of the same basic block, interchange the two. */
157 if (bb_note
!= NULL_RTX
158 && NOTE_INSN_BASIC_BLOCK_P (bb_note
)
160 && bb
== BLOCK_FOR_INSN (bb_note
))
162 reorder_insns_nobb (insn
, insn
, bb_note
);
163 BB_HEAD (bb
) = bb_note
;
164 if (BB_END (bb
) == bb_note
)
169 remove_node_from_insn_list (insn
, &nonlocal_goto_handler_labels
);
174 /* If this insn has already been deleted, something is very wrong. */
175 gcc_assert (!insn
->deleted ());
177 df_insn_delete (insn
);
179 insn
->set_deleted ();
182 /* If deleting a jump, decrement the use count of the label. Deleting
183 the label itself should happen in the normal course of block merging. */
186 if (JUMP_LABEL (insn
)
187 && LABEL_P (JUMP_LABEL (insn
)))
188 LABEL_NUSES (JUMP_LABEL (insn
))--;
190 /* If there are more targets, remove them too. */
192 = find_reg_note (insn
, REG_LABEL_TARGET
, NULL_RTX
)) != NULL_RTX
193 && LABEL_P (XEXP (note
, 0)))
195 LABEL_NUSES (XEXP (note
, 0))--;
196 remove_note (insn
, note
);
200 /* Also if deleting any insn that references a label as an operand. */
201 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, NULL_RTX
)) != NULL_RTX
202 && LABEL_P (XEXP (note
, 0)))
204 LABEL_NUSES (XEXP (note
, 0))--;
205 remove_note (insn
, note
);
208 if (rtx_jump_table_data
*table
= dyn_cast
<rtx_jump_table_data
*> (insn
))
210 rtvec vec
= table
->get_labels ();
211 int len
= GET_NUM_ELEM (vec
);
214 for (i
= 0; i
< len
; i
++)
216 rtx label
= XEXP (RTVEC_ELT (vec
, i
), 0);
218 /* When deleting code in bulk (e.g. removing many unreachable
219 blocks) we can delete a label that's a target of the vector
220 before deleting the vector itself. */
222 LABEL_NUSES (label
)--;
227 /* Like delete_insn but also purge dead edges from BB.
228 Return true if any edges are eliminated. */
231 delete_insn_and_edges (rtx_insn
*insn
)
235 if (INSN_P (insn
) && BLOCK_FOR_INSN (insn
))
237 basic_block bb
= BLOCK_FOR_INSN (insn
);
238 if (BB_END (bb
) == insn
)
240 else if (DEBUG_INSN_P (BB_END (bb
)))
241 for (rtx_insn
*dinsn
= NEXT_INSN (insn
);
242 DEBUG_INSN_P (dinsn
); dinsn
= NEXT_INSN (dinsn
))
243 if (BB_END (bb
) == dinsn
)
251 return purge_dead_edges (BLOCK_FOR_INSN (insn
));
255 /* Unlink a chain of insns between START and FINISH, leaving notes
256 that must be paired. If CLEAR_BB is true, we set bb field for
257 insns that cannot be removed to NULL. */
260 delete_insn_chain (rtx start
, rtx_insn
*finish
, bool clear_bb
)
262 /* Unchain the insns one by one. It would be quicker to delete all of these
263 with a single unchaining, rather than one at a time, but we need to keep
265 rtx_insn
*current
= finish
;
268 rtx_insn
*prev
= PREV_INSN (current
);
269 if (NOTE_P (current
) && !can_delete_note_p (as_a
<rtx_note
*> (current
)))
272 delete_insn (current
);
274 if (clear_bb
&& !current
->deleted ())
275 set_block_for_insn (current
, NULL
);
277 if (current
== start
)
283 /* Create a new basic block consisting of the instructions between HEAD and END
284 inclusive. This function is designed to allow fast BB construction - reuses
285 the note and basic block struct in BB_NOTE, if any and do not grow
286 BASIC_BLOCK chain and should be used directly only by CFG construction code.
287 END can be NULL in to create new empty basic block before HEAD. Both END
288 and HEAD can be NULL to create basic block at the end of INSN chain.
289 AFTER is the basic block we should be put after. */
292 create_basic_block_structure (rtx_insn
*head
, rtx_insn
*end
, rtx_note
*bb_note
,
298 && (bb
= NOTE_BASIC_BLOCK (bb_note
)) != NULL
301 /* If we found an existing note, thread it back onto the chain. */
309 after
= PREV_INSN (head
);
313 if (after
!= bb_note
&& NEXT_INSN (after
) != bb_note
)
314 reorder_insns_nobb (bb_note
, bb_note
, after
);
318 /* Otherwise we must create a note and a basic block structure. */
322 init_rtl_bb_info (bb
);
325 = emit_note_after (NOTE_INSN_BASIC_BLOCK
, get_last_insn ());
326 else if (LABEL_P (head
) && end
)
328 bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, head
);
334 bb_note
= emit_note_before (NOTE_INSN_BASIC_BLOCK
, head
);
340 NOTE_BASIC_BLOCK (bb_note
) = bb
;
343 /* Always include the bb note in the block. */
344 if (NEXT_INSN (end
) == bb_note
)
349 bb
->index
= last_basic_block_for_fn (cfun
)++;
350 bb
->flags
= BB_NEW
| BB_RTL
;
351 link_block (bb
, after
);
352 SET_BASIC_BLOCK_FOR_FN (cfun
, bb
->index
, bb
);
353 df_bb_refs_record (bb
->index
, false);
354 update_bb_for_insn (bb
);
355 BB_SET_PARTITION (bb
, BB_UNPARTITIONED
);
357 /* Tag the block so that we know it has been used when considering
358 other basic block notes. */
364 /* Create new basic block consisting of instructions in between HEAD and END
365 and place it to the BB chain after block AFTER. END can be NULL to
366 create a new empty basic block before HEAD. Both END and HEAD can be
367 NULL to create basic block at the end of INSN chain. */
370 rtl_create_basic_block (void *headp
, void *endp
, basic_block after
)
372 rtx_insn
*head
= (rtx_insn
*) headp
;
373 rtx_insn
*end
= (rtx_insn
*) endp
;
376 /* Grow the basic block array if needed. */
377 if ((size_t) last_basic_block_for_fn (cfun
)
378 >= basic_block_info_for_fn (cfun
)->length ())
379 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
380 last_basic_block_for_fn (cfun
) + 1);
382 n_basic_blocks_for_fn (cfun
)++;
384 bb
= create_basic_block_structure (head
, end
, NULL
, after
);
390 cfg_layout_create_basic_block (void *head
, void *end
, basic_block after
)
392 basic_block newbb
= rtl_create_basic_block (head
, end
, after
);
397 /* Delete the insns in a (non-live) block. We physically delete every
398 non-deleted-note insn, and update the flow graph appropriately.
400 Return nonzero if we deleted an exception handler. */
402 /* ??? Preserving all such notes strikes me as wrong. It would be nice
403 to post-process the stream to remove empty blocks, loops, ranges, etc. */
406 rtl_delete_block (basic_block b
)
408 rtx_insn
*insn
, *end
;
410 /* If the head of this block is a CODE_LABEL, then it might be the
411 label for an exception handler which can't be reached. We need
412 to remove the label from the exception_handler_label list. */
415 end
= get_last_bb_insn (b
);
417 /* Selectively delete the entire chain. */
419 delete_insn_chain (insn
, end
, true);
423 fprintf (dump_file
, "deleting block %d\n", b
->index
);
424 df_bb_delete (b
->index
);
427 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
430 compute_bb_for_insn (void)
434 FOR_EACH_BB_FN (bb
, cfun
)
436 rtx_insn
*end
= BB_END (bb
);
439 for (insn
= BB_HEAD (bb
); ; insn
= NEXT_INSN (insn
))
441 BLOCK_FOR_INSN (insn
) = bb
;
448 /* Release the basic_block_for_insn array. */
451 free_bb_for_insn (void)
454 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
455 if (!BARRIER_P (insn
))
456 BLOCK_FOR_INSN (insn
) = NULL
;
462 const pass_data pass_data_free_cfg
=
465 "*free_cfg", /* name */
466 OPTGROUP_NONE
, /* optinfo_flags */
468 0, /* properties_required */
469 0, /* properties_provided */
470 PROP_cfg
, /* properties_destroyed */
471 0, /* todo_flags_start */
472 0, /* todo_flags_finish */
475 class pass_free_cfg
: public rtl_opt_pass
478 pass_free_cfg (gcc::context
*ctxt
)
479 : rtl_opt_pass (pass_data_free_cfg
, ctxt
)
482 /* opt_pass methods: */
483 virtual unsigned int execute (function
*);
485 }; // class pass_free_cfg
488 pass_free_cfg::execute (function
*)
490 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
491 valid at that point so it would be too late to call df_analyze. */
492 if (DELAY_SLOTS
&& optimize
> 0 && flag_delayed_branch
)
494 df_note_add_problem ();
498 if (crtl
->has_bb_partition
)
499 insert_section_boundary_note ();
508 make_pass_free_cfg (gcc::context
*ctxt
)
510 return new pass_free_cfg (ctxt
);
513 /* Return RTX to emit after when we want to emit code on the entry of function. */
515 entry_of_function (void)
517 return (n_basic_blocks_for_fn (cfun
) > NUM_FIXED_BLOCKS
?
518 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
) : get_insns ());
521 /* Emit INSN at the entry point of the function, ensuring that it is only
522 executed once per function. */
524 emit_insn_at_entry (rtx insn
)
526 edge_iterator ei
= ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
527 edge e
= ei_safe_edge (ei
);
528 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
530 insert_insn_on_edge (insn
, e
);
531 commit_edge_insertions ();
534 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
535 (or BARRIER if found) and notify df of the bb change.
536 The insn chain range is inclusive
537 (i.e. both BEGIN and END will be updated. */
540 update_bb_for_insn_chain (rtx_insn
*begin
, rtx_insn
*end
, basic_block bb
)
544 end
= NEXT_INSN (end
);
545 for (insn
= begin
; insn
!= end
; insn
= NEXT_INSN (insn
))
546 if (!BARRIER_P (insn
))
547 df_insn_change_bb (insn
, bb
);
550 /* Update BLOCK_FOR_INSN of insns in BB to BB,
551 and notify df of the change. */
554 update_bb_for_insn (basic_block bb
)
556 update_bb_for_insn_chain (BB_HEAD (bb
), BB_END (bb
), bb
);
560 /* Like active_insn_p, except keep the return value use or clobber around
561 even after reload. */
564 flow_active_insn_p (const rtx_insn
*insn
)
566 if (active_insn_p (insn
))
569 /* A clobber of the function return value exists for buggy
570 programs that fail to return a value. Its effect is to
571 keep the return value from being live across the entire
572 function. If we allow it to be skipped, we introduce the
573 possibility for register lifetime confusion.
574 Similarly, keep a USE of the function return value, otherwise
575 the USE is dropped and we could fail to thread jump if USE
576 appears on some paths and not on others, see PR90257. */
577 if ((GET_CODE (PATTERN (insn
)) == CLOBBER
578 || GET_CODE (PATTERN (insn
)) == USE
)
579 && REG_P (XEXP (PATTERN (insn
), 0))
580 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))
586 /* Return true if the block has no effect and only forwards control flow to
587 its single destination. */
590 contains_no_active_insn_p (const_basic_block bb
)
594 if (bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
595 || bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
596 || !single_succ_p (bb
)
597 || (single_succ_edge (bb
)->flags
& EDGE_FAKE
) != 0)
600 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= NEXT_INSN (insn
))
601 if (INSN_P (insn
) && flow_active_insn_p (insn
))
604 return (!INSN_P (insn
)
605 || (JUMP_P (insn
) && simplejump_p (insn
))
606 || !flow_active_insn_p (insn
));
609 /* Likewise, but protect loop latches, headers and preheaders. */
610 /* FIXME: Make this a cfg hook. */
613 forwarder_block_p (const_basic_block bb
)
615 if (!contains_no_active_insn_p (bb
))
618 /* Protect loop latches, headers and preheaders. */
622 if (bb
->loop_father
->header
== bb
)
624 dest
= EDGE_SUCC (bb
, 0)->dest
;
625 if (dest
->loop_father
->header
== dest
)
632 /* Return nonzero if we can reach target from src by falling through. */
633 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
636 can_fallthru (basic_block src
, basic_block target
)
638 rtx_insn
*insn
= BB_END (src
);
643 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
645 if (src
->next_bb
!= target
)
648 /* ??? Later we may add code to move jump tables offline. */
649 if (tablejump_p (insn
, NULL
, NULL
))
652 FOR_EACH_EDGE (e
, ei
, src
->succs
)
653 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
654 && e
->flags
& EDGE_FALLTHRU
)
657 insn2
= BB_HEAD (target
);
658 if (!active_insn_p (insn2
))
659 insn2
= next_active_insn (insn2
);
661 return next_active_insn (insn
) == insn2
;
664 /* Return nonzero if we could reach target from src by falling through,
665 if the target was made adjacent. If we already have a fall-through
666 edge to the exit block, we can't do that. */
668 could_fall_through (basic_block src
, basic_block target
)
673 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
675 FOR_EACH_EDGE (e
, ei
, src
->succs
)
676 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
677 && e
->flags
& EDGE_FALLTHRU
)
682 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
684 bb_note (basic_block bb
)
690 note
= NEXT_INSN (note
);
692 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note
));
693 return as_a
<rtx_note
*> (note
);
696 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
697 note associated with the BLOCK. */
700 first_insn_after_basic_block_note (basic_block block
)
704 /* Get the first instruction in the block. */
705 insn
= BB_HEAD (block
);
707 if (insn
== NULL_RTX
)
710 insn
= NEXT_INSN (insn
);
711 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
713 return NEXT_INSN (insn
);
716 /* Creates a new basic block just after basic block BB by splitting
717 everything after specified instruction INSNP. */
720 rtl_split_block (basic_block bb
, void *insnp
)
723 rtx_insn
*insn
= (rtx_insn
*) insnp
;
729 insn
= first_insn_after_basic_block_note (bb
);
733 rtx_insn
*next
= insn
;
735 insn
= PREV_INSN (insn
);
737 /* If the block contains only debug insns, insn would have
738 been NULL in a non-debug compilation, and then we'd end
739 up emitting a DELETED note. For -fcompare-debug
740 stability, emit the note too. */
741 if (insn
!= BB_END (bb
)
742 && DEBUG_INSN_P (next
)
743 && DEBUG_INSN_P (BB_END (bb
)))
745 while (next
!= BB_END (bb
) && DEBUG_INSN_P (next
))
746 next
= NEXT_INSN (next
);
748 if (next
== BB_END (bb
))
749 emit_note_after (NOTE_INSN_DELETED
, next
);
753 insn
= get_last_insn ();
756 /* We probably should check type of the insn so that we do not create
757 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
759 if (insn
== BB_END (bb
))
760 emit_note_after (NOTE_INSN_DELETED
, insn
);
762 /* Create the new basic block. */
763 new_bb
= create_basic_block (NEXT_INSN (insn
), BB_END (bb
), bb
);
764 BB_COPY_PARTITION (new_bb
, bb
);
767 /* Redirect the outgoing edges. */
768 new_bb
->succs
= bb
->succs
;
770 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
773 /* The new block starts off being dirty. */
774 df_set_bb_dirty (bb
);
778 /* Return true if the single edge between blocks A and B is the only place
779 in RTL which holds some unique locus. */
782 unique_locus_on_edge_between_p (basic_block a
, basic_block b
)
784 const location_t goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
785 rtx_insn
*insn
, *end
;
787 if (LOCATION_LOCUS (goto_locus
) == UNKNOWN_LOCATION
)
790 /* First scan block A backward. */
792 end
= PREV_INSN (BB_HEAD (a
));
793 while (insn
!= end
&& (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
794 insn
= PREV_INSN (insn
);
796 if (insn
!= end
&& INSN_LOCATION (insn
) == goto_locus
)
799 /* Then scan block B forward. */
803 end
= NEXT_INSN (BB_END (b
));
804 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
805 insn
= NEXT_INSN (insn
);
807 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
808 && INSN_LOCATION (insn
) == goto_locus
)
815 /* If the single edge between blocks A and B is the only place in RTL which
816 holds some unique locus, emit a nop with that locus between the blocks. */
819 emit_nop_for_unique_locus_between (basic_block a
, basic_block b
)
821 if (!unique_locus_on_edge_between_p (a
, b
))
824 BB_END (a
) = emit_insn_after_noloc (gen_nop (), BB_END (a
), a
);
825 INSN_LOCATION (BB_END (a
)) = EDGE_SUCC (a
, 0)->goto_locus
;
828 /* Blocks A and B are to be merged into a single block A. The insns
829 are already contiguous. */
832 rtl_merge_blocks (basic_block a
, basic_block b
)
834 /* If B is a forwarder block whose outgoing edge has no location, we'll
835 propagate the locus of the edge between A and B onto it. */
836 const bool forward_edge_locus
837 = (b
->flags
& BB_FORWARDER_BLOCK
) != 0
838 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
;
839 rtx_insn
*b_head
= BB_HEAD (b
), *b_end
= BB_END (b
), *a_end
= BB_END (a
);
840 rtx_insn
*del_first
= NULL
, *del_last
= NULL
;
841 rtx_insn
*b_debug_start
= b_end
, *b_debug_end
= b_end
;
845 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
848 while (DEBUG_INSN_P (b_end
))
849 b_end
= PREV_INSN (b_debug_start
= b_end
);
851 /* If there was a CODE_LABEL beginning B, delete it. */
852 if (LABEL_P (b_head
))
854 /* Detect basic blocks with nothing but a label. This can happen
855 in particular at the end of a function. */
859 del_first
= del_last
= b_head
;
860 b_head
= NEXT_INSN (b_head
);
863 /* Delete the basic block note and handle blocks containing just that
865 if (NOTE_INSN_BASIC_BLOCK_P (b_head
))
873 b_head
= NEXT_INSN (b_head
);
876 /* If there was a jump out of A, delete it. */
881 for (prev
= PREV_INSN (a_end
); ; prev
= PREV_INSN (prev
))
883 || NOTE_INSN_BASIC_BLOCK_P (prev
)
884 || prev
== BB_HEAD (a
))
889 /* If this was a conditional jump, we need to also delete
890 the insn that set cc0. */
891 if (HAVE_cc0
&& only_sets_cc0_p (prev
))
893 rtx_insn
*tmp
= prev
;
895 prev
= prev_nonnote_insn (prev
);
901 a_end
= PREV_INSN (del_first
);
903 else if (BARRIER_P (NEXT_INSN (a_end
)))
904 del_first
= NEXT_INSN (a_end
);
906 /* Delete everything marked above as well as crap that might be
907 hanging out between the two blocks. */
909 BB_HEAD (b
) = b_empty
? NULL
: b_head
;
910 delete_insn_chain (del_first
, del_last
, true);
912 /* If not optimizing, preserve the locus of the single edge between
913 blocks A and B if necessary by emitting a nop. */
915 && !forward_edge_locus
916 && !DECL_IGNORED_P (current_function_decl
))
918 emit_nop_for_unique_locus_between (a
, b
);
922 /* Reassociate the insns of B with A. */
925 update_bb_for_insn_chain (a_end
, b_debug_end
, a
);
927 BB_END (a
) = b_debug_end
;
930 else if (b_end
!= b_debug_end
)
932 /* Move any deleted labels and other notes between the end of A
933 and the debug insns that make up B after the debug insns,
934 bringing the debug insns into A while keeping the notes after
936 if (NEXT_INSN (a_end
) != b_debug_start
)
937 reorder_insns_nobb (NEXT_INSN (a_end
), PREV_INSN (b_debug_start
),
939 update_bb_for_insn_chain (b_debug_start
, b_debug_end
, a
);
940 BB_END (a
) = b_debug_end
;
943 df_bb_delete (b
->index
);
945 if (forward_edge_locus
)
946 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
949 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
953 /* Return true when block A and B can be merged. */
956 rtl_can_merge_blocks (basic_block a
, basic_block b
)
958 /* If we are partitioning hot/cold basic blocks, we don't want to
959 mess up unconditional or indirect jumps that cross between hot
962 Basic block partitioning may result in some jumps that appear to
963 be optimizable (or blocks that appear to be mergeable), but which really
964 must be left untouched (they are required to make it safely across
965 partition boundaries). See the comments at the top of
966 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
968 if (BB_PARTITION (a
) != BB_PARTITION (b
))
971 /* Protect the loop latches. */
972 if (current_loops
&& b
->loop_father
->latch
== b
)
975 /* There must be exactly one edge in between the blocks. */
976 return (single_succ_p (a
)
977 && single_succ (a
) == b
980 /* Must be simple edge. */
981 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
983 && a
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
984 && b
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
985 /* If the jump insn has side effects,
986 we can't kill the edge. */
987 && (!JUMP_P (BB_END (a
))
989 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
992 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
996 block_label (basic_block block
)
998 if (block
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1001 if (!LABEL_P (BB_HEAD (block
)))
1003 BB_HEAD (block
) = emit_label_before (gen_label_rtx (), BB_HEAD (block
));
1006 return as_a
<rtx_code_label
*> (BB_HEAD (block
));
1009 /* Remove all barriers from BB_FOOTER of a BB. */
1012 remove_barriers_from_footer (basic_block bb
)
1014 rtx_insn
*insn
= BB_FOOTER (bb
);
1016 /* Remove barriers but keep jumptables. */
1019 if (BARRIER_P (insn
))
1021 if (PREV_INSN (insn
))
1022 SET_NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
1024 BB_FOOTER (bb
) = NEXT_INSN (insn
);
1025 if (NEXT_INSN (insn
))
1026 SET_PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
1030 insn
= NEXT_INSN (insn
);
1034 /* Attempt to perform edge redirection by replacing possibly complex jump
1035 instruction by unconditional jump or removing jump completely. This can
1036 apply only if all edges now point to the same block. The parameters and
1037 return values are equivalent to redirect_edge_and_branch. */
1040 try_redirect_by_replacing_jump (edge e
, basic_block target
, bool in_cfglayout
)
1042 basic_block src
= e
->src
;
1043 rtx_insn
*insn
= BB_END (src
), *kill_from
;
1047 /* If we are partitioning hot/cold basic blocks, we don't want to
1048 mess up unconditional or indirect jumps that cross between hot
1051 Basic block partitioning may result in some jumps that appear to
1052 be optimizable (or blocks that appear to be mergeable), but which really
1053 must be left untouched (they are required to make it safely across
1054 partition boundaries). See the comments at the top of
1055 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1057 if (BB_PARTITION (src
) != BB_PARTITION (target
))
1060 /* We can replace or remove a complex jump only when we have exactly
1061 two edges. Also, if we have exactly one outgoing edge, we can
1063 if (EDGE_COUNT (src
->succs
) >= 3
1064 /* Verify that all targets will be TARGET. Specifically, the
1065 edge that is not E must also go to TARGET. */
1066 || (EDGE_COUNT (src
->succs
) == 2
1067 && EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
))
1070 if (!onlyjump_p (insn
))
1072 if ((!optimize
|| reload_completed
) && tablejump_p (insn
, NULL
, NULL
))
1075 /* Avoid removing branch with side effects. */
1076 set
= single_set (insn
);
1077 if (!set
|| side_effects_p (set
))
1080 /* In case we zap a conditional jump, we'll need to kill
1081 the cc0 setter too. */
1083 if (HAVE_cc0
&& reg_mentioned_p (cc0_rtx
, PATTERN (insn
))
1084 && only_sets_cc0_p (PREV_INSN (insn
)))
1085 kill_from
= PREV_INSN (insn
);
1087 /* See if we can create the fallthru edge. */
1088 if (in_cfglayout
|| can_fallthru (src
, target
))
1091 fprintf (dump_file
, "Removing jump %i.\n", INSN_UID (insn
));
1094 /* Selectively unlink whole insn chain. */
1097 delete_insn_chain (kill_from
, BB_END (src
), false);
1098 remove_barriers_from_footer (src
);
1101 delete_insn_chain (kill_from
, PREV_INSN (BB_HEAD (target
)),
1105 /* If this already is simplejump, redirect it. */
1106 else if (simplejump_p (insn
))
1108 if (e
->dest
== target
)
1111 fprintf (dump_file
, "Redirecting jump %i from %i to %i.\n",
1112 INSN_UID (insn
), e
->dest
->index
, target
->index
);
1113 if (!redirect_jump (as_a
<rtx_jump_insn
*> (insn
),
1114 block_label (target
), 0))
1116 gcc_assert (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
));
1121 /* Cannot do anything for target exit block. */
1122 else if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1125 /* Or replace possibly complicated jump insn by simple jump insn. */
1128 rtx_code_label
*target_label
= block_label (target
);
1131 rtx_jump_table_data
*table
;
1133 emit_jump_insn_after_noloc (targetm
.gen_jump (target_label
), insn
);
1134 JUMP_LABEL (BB_END (src
)) = target_label
;
1135 LABEL_NUSES (target_label
)++;
1137 fprintf (dump_file
, "Replacing insn %i by jump %i\n",
1138 INSN_UID (insn
), INSN_UID (BB_END (src
)));
1141 delete_insn_chain (kill_from
, insn
, false);
1143 /* Recognize a tablejump that we are converting to a
1144 simple jump and remove its associated CODE_LABEL
1145 and ADDR_VEC or ADDR_DIFF_VEC. */
1146 if (tablejump_p (insn
, &label
, &table
))
1147 delete_insn_chain (label
, table
, false);
1149 barrier
= next_nonnote_nondebug_insn (BB_END (src
));
1150 if (!barrier
|| !BARRIER_P (barrier
))
1151 emit_barrier_after (BB_END (src
));
1154 if (barrier
!= NEXT_INSN (BB_END (src
)))
1156 /* Move the jump before barrier so that the notes
1157 which originally were or were created before jump table are
1158 inside the basic block. */
1159 rtx_insn
*new_insn
= BB_END (src
);
1161 update_bb_for_insn_chain (NEXT_INSN (BB_END (src
)),
1162 PREV_INSN (barrier
), src
);
1164 SET_NEXT_INSN (PREV_INSN (new_insn
)) = NEXT_INSN (new_insn
);
1165 SET_PREV_INSN (NEXT_INSN (new_insn
)) = PREV_INSN (new_insn
);
1167 SET_NEXT_INSN (new_insn
) = barrier
;
1168 SET_NEXT_INSN (PREV_INSN (barrier
)) = new_insn
;
1170 SET_PREV_INSN (new_insn
) = PREV_INSN (barrier
);
1171 SET_PREV_INSN (barrier
) = new_insn
;
1176 /* Keep only one edge out and set proper flags. */
1177 if (!single_succ_p (src
))
1179 gcc_assert (single_succ_p (src
));
1181 e
= single_succ_edge (src
);
1183 e
->flags
= EDGE_FALLTHRU
;
1187 e
->probability
= profile_probability::always ();
1189 if (e
->dest
!= target
)
1190 redirect_edge_succ (e
, target
);
1194 /* Subroutine of redirect_branch_edge that tries to patch the jump
1195 instruction INSN so that it reaches block NEW. Do this
1196 only when it originally reached block OLD. Return true if this
1197 worked or the original target wasn't OLD, return false if redirection
1201 patch_jump_insn (rtx_insn
*insn
, rtx_insn
*old_label
, basic_block new_bb
)
1203 rtx_jump_table_data
*table
;
1205 /* Recognize a tablejump and adjust all matching cases. */
1206 if (tablejump_p (insn
, NULL
, &table
))
1210 rtx_code_label
*new_label
= block_label (new_bb
);
1212 if (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1214 vec
= table
->get_labels ();
1216 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
1217 if (XEXP (RTVEC_ELT (vec
, j
), 0) == old_label
)
1219 RTVEC_ELT (vec
, j
) = gen_rtx_LABEL_REF (Pmode
, new_label
);
1220 --LABEL_NUSES (old_label
);
1221 ++LABEL_NUSES (new_label
);
1224 /* Handle casesi dispatch insns. */
1225 if ((tmp
= tablejump_casesi_pattern (insn
)) != NULL_RTX
1226 && label_ref_label (XEXP (SET_SRC (tmp
), 2)) == old_label
)
1228 XEXP (SET_SRC (tmp
), 2) = gen_rtx_LABEL_REF (Pmode
,
1230 --LABEL_NUSES (old_label
);
1231 ++LABEL_NUSES (new_label
);
1234 else if ((tmp
= extract_asm_operands (PATTERN (insn
))) != NULL
)
1236 int i
, n
= ASM_OPERANDS_LABEL_LENGTH (tmp
);
1239 if (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1241 rtx_code_label
*new_label
= block_label (new_bb
);
1243 for (i
= 0; i
< n
; ++i
)
1245 rtx old_ref
= ASM_OPERANDS_LABEL (tmp
, i
);
1246 gcc_assert (GET_CODE (old_ref
) == LABEL_REF
);
1247 if (XEXP (old_ref
, 0) == old_label
)
1249 ASM_OPERANDS_LABEL (tmp
, i
)
1250 = gen_rtx_LABEL_REF (Pmode
, new_label
);
1251 --LABEL_NUSES (old_label
);
1252 ++LABEL_NUSES (new_label
);
1256 if (JUMP_LABEL (insn
) == old_label
)
1258 JUMP_LABEL (insn
) = new_label
;
1259 note
= find_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1261 remove_note (insn
, note
);
1265 note
= find_reg_note (insn
, REG_LABEL_TARGET
, old_label
);
1267 remove_note (insn
, note
);
1268 if (JUMP_LABEL (insn
) != new_label
1269 && !find_reg_note (insn
, REG_LABEL_TARGET
, new_label
))
1270 add_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1272 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, old_label
))
1274 XEXP (note
, 0) = new_label
;
1278 /* ?? We may play the games with moving the named labels from
1279 one basic block to the other in case only one computed_jump is
1281 if (computed_jump_p (insn
)
1282 /* A return instruction can't be redirected. */
1283 || returnjump_p (insn
))
1286 if (!currently_expanding_to_rtl
|| JUMP_LABEL (insn
) == old_label
)
1288 /* If the insn doesn't go where we think, we're confused. */
1289 gcc_assert (JUMP_LABEL (insn
) == old_label
);
1291 /* If the substitution doesn't succeed, die. This can happen
1292 if the back end emitted unrecognizable instructions or if
1293 target is exit block on some arches. Or for crossing
1295 if (!redirect_jump (as_a
<rtx_jump_insn
*> (insn
),
1296 block_label (new_bb
), 0))
1298 gcc_assert (new_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
1299 || CROSSING_JUMP_P (insn
));
1308 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1311 redirect_branch_edge (edge e
, basic_block target
)
1313 rtx_insn
*old_label
= BB_HEAD (e
->dest
);
1314 basic_block src
= e
->src
;
1315 rtx_insn
*insn
= BB_END (src
);
1317 /* We can only redirect non-fallthru edges of jump insn. */
1318 if (e
->flags
& EDGE_FALLTHRU
)
1320 else if (!JUMP_P (insn
) && !currently_expanding_to_rtl
)
1323 if (!currently_expanding_to_rtl
)
1325 if (!patch_jump_insn (as_a
<rtx_jump_insn
*> (insn
), old_label
, target
))
1329 /* When expanding this BB might actually contain multiple
1330 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1331 Redirect all of those that match our label. */
1332 FOR_BB_INSNS (src
, insn
)
1333 if (JUMP_P (insn
) && !patch_jump_insn (as_a
<rtx_jump_insn
*> (insn
),
1338 fprintf (dump_file
, "Edge %i->%i redirected to %i\n",
1339 e
->src
->index
, e
->dest
->index
, target
->index
);
1341 if (e
->dest
!= target
)
1342 e
= redirect_edge_succ_nodup (e
, target
);
1347 /* Called when edge E has been redirected to a new destination,
1348 in order to update the region crossing flag on the edge and
1352 fixup_partition_crossing (edge e
)
1354 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (cfun
) || e
->dest
1355 == EXIT_BLOCK_PTR_FOR_FN (cfun
))
1357 /* If we redirected an existing edge, it may already be marked
1358 crossing, even though the new src is missing a reg crossing note.
1359 But make sure reg crossing note doesn't already exist before
1361 if (BB_PARTITION (e
->src
) != BB_PARTITION (e
->dest
))
1363 e
->flags
|= EDGE_CROSSING
;
1364 if (JUMP_P (BB_END (e
->src
)))
1365 CROSSING_JUMP_P (BB_END (e
->src
)) = 1;
1367 else if (BB_PARTITION (e
->src
) == BB_PARTITION (e
->dest
))
1369 e
->flags
&= ~EDGE_CROSSING
;
1370 /* Remove the section crossing note from jump at end of
1371 src if it exists, and if no other successors are
1373 if (JUMP_P (BB_END (e
->src
)) && CROSSING_JUMP_P (BB_END (e
->src
)))
1375 bool has_crossing_succ
= false;
1378 FOR_EACH_EDGE (e2
, ei
, e
->src
->succs
)
1380 has_crossing_succ
|= (e2
->flags
& EDGE_CROSSING
);
1381 if (has_crossing_succ
)
1384 if (!has_crossing_succ
)
1385 CROSSING_JUMP_P (BB_END (e
->src
)) = 0;
1390 /* Called when block BB has been reassigned to the cold partition,
1391 because it is now dominated by another cold block,
1392 to ensure that the region crossing attributes are updated. */
1395 fixup_new_cold_bb (basic_block bb
)
1400 /* This is called when a hot bb is found to now be dominated
1401 by a cold bb and therefore needs to become cold. Therefore,
1402 its preds will no longer be region crossing. Any non-dominating
1403 preds that were previously hot would also have become cold
1404 in the caller for the same region. Any preds that were previously
1405 region-crossing will be adjusted in fixup_partition_crossing. */
1406 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1408 fixup_partition_crossing (e
);
1411 /* Possibly need to make bb's successor edges region crossing,
1412 or remove stale region crossing. */
1413 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1415 /* We can't have fall-through edges across partition boundaries.
1416 Note that force_nonfallthru will do any necessary partition
1417 boundary fixup by calling fixup_partition_crossing itself. */
1418 if ((e
->flags
& EDGE_FALLTHRU
)
1419 && BB_PARTITION (bb
) != BB_PARTITION (e
->dest
)
1420 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1421 force_nonfallthru (e
);
1423 fixup_partition_crossing (e
);
1427 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1428 expense of adding new instructions or reordering basic blocks.
1430 Function can be also called with edge destination equivalent to the TARGET.
1431 Then it should try the simplifications and do nothing if none is possible.
1433 Return edge representing the branch if transformation succeeded. Return NULL
1435 We still return NULL in case E already destinated TARGET and we didn't
1436 managed to simplify instruction stream. */
1439 rtl_redirect_edge_and_branch (edge e
, basic_block target
)
1442 basic_block src
= e
->src
;
1443 basic_block dest
= e
->dest
;
1445 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
1451 if ((ret
= try_redirect_by_replacing_jump (e
, target
, false)) != NULL
)
1453 df_set_bb_dirty (src
);
1454 fixup_partition_crossing (ret
);
1458 ret
= redirect_branch_edge (e
, target
);
1462 df_set_bb_dirty (src
);
1463 fixup_partition_crossing (ret
);
1467 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1470 emit_barrier_after_bb (basic_block bb
)
1472 rtx_barrier
*barrier
= emit_barrier_after (BB_END (bb
));
1473 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1474 || current_ir_type () == IR_RTL_CFGLAYOUT
);
1475 if (current_ir_type () == IR_RTL_CFGLAYOUT
)
1477 rtx_insn
*insn
= unlink_insn_chain (barrier
, barrier
);
1481 rtx_insn
*footer_tail
= BB_FOOTER (bb
);
1483 while (NEXT_INSN (footer_tail
))
1484 footer_tail
= NEXT_INSN (footer_tail
);
1485 if (!BARRIER_P (footer_tail
))
1487 SET_NEXT_INSN (footer_tail
) = insn
;
1488 SET_PREV_INSN (insn
) = footer_tail
;
1492 BB_FOOTER (bb
) = insn
;
1496 /* Like force_nonfallthru below, but additionally performs redirection
1497 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1498 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1499 simple_return_rtx, indicating which kind of returnjump to create.
1500 It should be NULL otherwise. */
1503 force_nonfallthru_and_redirect (edge e
, basic_block target
, rtx jump_label
)
1505 basic_block jump_block
, new_bb
= NULL
, src
= e
->src
;
1508 int abnormal_edge_flags
= 0;
1509 bool asm_goto_edge
= false;
1512 /* In the case the last instruction is conditional jump to the next
1513 instruction, first redirect the jump itself and then continue
1514 by creating a basic block afterwards to redirect fallthru edge. */
1515 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1516 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
1517 && any_condjump_p (BB_END (e
->src
))
1518 && JUMP_LABEL (BB_END (e
->src
)) == BB_HEAD (e
->dest
))
1521 edge b
= unchecked_make_edge (e
->src
, target
, 0);
1524 redirected
= redirect_jump (as_a
<rtx_jump_insn
*> (BB_END (e
->src
)),
1525 block_label (target
), 0);
1526 gcc_assert (redirected
);
1528 note
= find_reg_note (BB_END (e
->src
), REG_BR_PROB
, NULL_RTX
);
1531 int prob
= XINT (note
, 0);
1533 b
->probability
= profile_probability::from_reg_br_prob_note (prob
);
1534 e
->probability
-= e
->probability
;
1538 if (e
->flags
& EDGE_ABNORMAL
)
1540 /* Irritating special case - fallthru edge to the same block as abnormal
1542 We can't redirect abnormal edge, but we still can split the fallthru
1543 one and create separate abnormal edge to original destination.
1544 This allows bb-reorder to make such edge non-fallthru. */
1545 gcc_assert (e
->dest
== target
);
1546 abnormal_edge_flags
= e
->flags
& ~EDGE_FALLTHRU
;
1547 e
->flags
&= EDGE_FALLTHRU
;
1551 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
1552 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1554 /* We can't redirect the entry block. Create an empty block
1555 at the start of the function which we use to add the new
1561 basic_block bb
= create_basic_block (BB_HEAD (e
->dest
), NULL
,
1562 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
1563 bb
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
1565 /* Make sure new block ends up in correct hot/cold section. */
1566 BB_COPY_PARTITION (bb
, e
->dest
);
1568 /* Change the existing edge's source to be the new block, and add
1569 a new edge from the entry block to the new block. */
1571 for (ei
= ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
1572 (tmp
= ei_safe_edge (ei
)); )
1576 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
->unordered_remove (ei
.index
);
1586 vec_safe_push (bb
->succs
, e
);
1587 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), bb
,
1592 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1593 don't point to the target or fallthru label. */
1594 if (JUMP_P (BB_END (e
->src
))
1595 && target
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
1596 && (e
->flags
& EDGE_FALLTHRU
)
1597 && (note
= extract_asm_operands (PATTERN (BB_END (e
->src
)))))
1599 int i
, n
= ASM_OPERANDS_LABEL_LENGTH (note
);
1600 bool adjust_jump_target
= false;
1602 for (i
= 0; i
< n
; ++i
)
1604 if (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) == BB_HEAD (e
->dest
))
1606 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0))--;
1607 XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) = block_label (target
);
1608 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0))++;
1609 adjust_jump_target
= true;
1611 if (XEXP (ASM_OPERANDS_LABEL (note
, i
), 0) == BB_HEAD (target
))
1612 asm_goto_edge
= true;
1614 if (adjust_jump_target
)
1616 rtx_insn
*insn
= BB_END (e
->src
);
1618 rtx_insn
*old_label
= BB_HEAD (e
->dest
);
1619 rtx_insn
*new_label
= BB_HEAD (target
);
1621 if (JUMP_LABEL (insn
) == old_label
)
1623 JUMP_LABEL (insn
) = new_label
;
1624 note
= find_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1626 remove_note (insn
, note
);
1630 note
= find_reg_note (insn
, REG_LABEL_TARGET
, old_label
);
1632 remove_note (insn
, note
);
1633 if (JUMP_LABEL (insn
) != new_label
1634 && !find_reg_note (insn
, REG_LABEL_TARGET
, new_label
))
1635 add_reg_note (insn
, REG_LABEL_TARGET
, new_label
);
1637 while ((note
= find_reg_note (insn
, REG_LABEL_OPERAND
, old_label
))
1639 XEXP (note
, 0) = new_label
;
1643 if (EDGE_COUNT (e
->src
->succs
) >= 2 || abnormal_edge_flags
|| asm_goto_edge
)
1646 profile_count count
= e
->count ();
1647 profile_probability probability
= e
->probability
;
1648 /* Create the new structures. */
1650 /* If the old block ended with a tablejump, skip its table
1651 by searching forward from there. Otherwise start searching
1652 forward from the last instruction of the old block. */
1653 rtx_jump_table_data
*table
;
1654 if (tablejump_p (BB_END (e
->src
), NULL
, &table
))
1657 new_head
= BB_END (e
->src
);
1658 new_head
= NEXT_INSN (new_head
);
1660 jump_block
= create_basic_block (new_head
, NULL
, e
->src
);
1661 jump_block
->count
= count
;
1663 /* Make sure new block ends up in correct hot/cold section. */
1665 BB_COPY_PARTITION (jump_block
, e
->src
);
1668 new_edge
= make_edge (e
->src
, jump_block
, EDGE_FALLTHRU
);
1669 new_edge
->probability
= probability
;
1671 /* Redirect old edge. */
1672 redirect_edge_pred (e
, jump_block
);
1673 e
->probability
= profile_probability::always ();
1675 /* If e->src was previously region crossing, it no longer is
1676 and the reg crossing note should be removed. */
1677 fixup_partition_crossing (new_edge
);
1679 /* If asm goto has any label refs to target's label,
1680 add also edge from asm goto bb to target. */
1683 new_edge
->probability
= new_edge
->probability
.apply_scale (1, 2);
1684 jump_block
->count
= jump_block
->count
.apply_scale (1, 2);
1685 edge new_edge2
= make_edge (new_edge
->src
, target
,
1686 e
->flags
& ~EDGE_FALLTHRU
);
1687 new_edge2
->probability
= probability
- new_edge
->probability
;
1690 new_bb
= jump_block
;
1693 jump_block
= e
->src
;
1695 loc
= e
->goto_locus
;
1696 e
->flags
&= ~EDGE_FALLTHRU
;
1697 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1699 if (jump_label
== ret_rtx
)
1700 emit_jump_insn_after_setloc (targetm
.gen_return (),
1701 BB_END (jump_block
), loc
);
1704 gcc_assert (jump_label
== simple_return_rtx
);
1705 emit_jump_insn_after_setloc (targetm
.gen_simple_return (),
1706 BB_END (jump_block
), loc
);
1708 set_return_jump_label (BB_END (jump_block
));
1712 rtx_code_label
*label
= block_label (target
);
1713 emit_jump_insn_after_setloc (targetm
.gen_jump (label
),
1714 BB_END (jump_block
), loc
);
1715 JUMP_LABEL (BB_END (jump_block
)) = label
;
1716 LABEL_NUSES (label
)++;
1719 /* We might be in cfg layout mode, and if so, the following routine will
1720 insert the barrier correctly. */
1721 emit_barrier_after_bb (jump_block
);
1722 redirect_edge_succ_nodup (e
, target
);
1724 if (abnormal_edge_flags
)
1725 make_edge (src
, target
, abnormal_edge_flags
);
1727 df_mark_solutions_dirty ();
1728 fixup_partition_crossing (e
);
1732 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1733 (and possibly create new basic block) to make edge non-fallthru.
1734 Return newly created BB or NULL if none. */
1737 rtl_force_nonfallthru (edge e
)
1739 return force_nonfallthru_and_redirect (e
, e
->dest
, NULL_RTX
);
1742 /* Redirect edge even at the expense of creating new jump insn or
1743 basic block. Return new basic block if created, NULL otherwise.
1744 Conversion must be possible. */
1747 rtl_redirect_edge_and_branch_force (edge e
, basic_block target
)
1749 if (redirect_edge_and_branch (e
, target
)
1750 || e
->dest
== target
)
1753 /* In case the edge redirection failed, try to force it to be non-fallthru
1754 and redirect newly created simplejump. */
1755 df_set_bb_dirty (e
->src
);
1756 return force_nonfallthru_and_redirect (e
, target
, NULL_RTX
);
1759 /* The given edge should potentially be a fallthru edge. If that is in
1760 fact true, delete the jump and barriers that are in the way. */
1763 rtl_tidy_fallthru_edge (edge e
)
1766 basic_block b
= e
->src
, c
= b
->next_bb
;
1768 /* ??? In a late-running flow pass, other folks may have deleted basic
1769 blocks by nopping out blocks, leaving multiple BARRIERs between here
1770 and the target label. They ought to be chastised and fixed.
1772 We can also wind up with a sequence of undeletable labels between
1773 one block and the next.
1775 So search through a sequence of barriers, labels, and notes for
1776 the head of block C and assert that we really do fall through. */
1778 for (q
= NEXT_INSN (BB_END (b
)); q
!= BB_HEAD (c
); q
= NEXT_INSN (q
))
1779 if (NONDEBUG_INSN_P (q
))
1782 /* Remove what will soon cease being the jump insn from the source block.
1783 If block B consisted only of this single jump, turn it into a deleted
1788 && (any_uncondjump_p (q
)
1789 || single_succ_p (b
)))
1792 rtx_jump_table_data
*table
;
1794 if (tablejump_p (q
, &label
, &table
))
1796 /* The label is likely mentioned in some instruction before
1797 the tablejump and might not be DCEd, so turn it into
1798 a note instead and move before the tablejump that is going to
1800 const char *name
= LABEL_NAME (label
);
1801 PUT_CODE (label
, NOTE
);
1802 NOTE_KIND (label
) = NOTE_INSN_DELETED_LABEL
;
1803 NOTE_DELETED_LABEL_NAME (label
) = name
;
1804 reorder_insns (label
, label
, PREV_INSN (q
));
1805 delete_insn (table
);
1808 /* If this was a conditional jump, we need to also delete
1809 the insn that set cc0. */
1810 if (HAVE_cc0
&& any_condjump_p (q
) && only_sets_cc0_p (PREV_INSN (q
)))
1815 /* Unconditional jumps with side-effects (i.e. which we can't just delete
1816 together with the barrier) should never have a fallthru edge. */
1817 else if (JUMP_P (q
) && any_uncondjump_p (q
))
1820 /* Selectively unlink the sequence. */
1821 if (q
!= PREV_INSN (BB_HEAD (c
)))
1822 delete_insn_chain (NEXT_INSN (q
), PREV_INSN (BB_HEAD (c
)), false);
1824 e
->flags
|= EDGE_FALLTHRU
;
1827 /* Should move basic block BB after basic block AFTER. NIY. */
1830 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED
,
1831 basic_block after ATTRIBUTE_UNUSED
)
1836 /* Locate the last bb in the same partition as START_BB. */
1839 last_bb_in_partition (basic_block start_bb
)
1842 FOR_BB_BETWEEN (bb
, start_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), next_bb
)
1844 if (BB_PARTITION (start_bb
) != BB_PARTITION (bb
->next_bb
))
1847 /* Return bb before the exit block. */
1851 /* Split a (typically critical) edge. Return the new block.
1852 The edge must not be abnormal.
1854 ??? The code generally expects to be called on critical edges.
1855 The case of a block ending in an unconditional jump to a
1856 block with multiple predecessors is not handled optimally. */
1859 rtl_split_edge (edge edge_in
)
1861 basic_block bb
, new_bb
;
1864 /* Abnormal edges cannot be split. */
1865 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
1867 /* We are going to place the new block in front of edge destination.
1868 Avoid existence of fallthru predecessors. */
1869 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1871 edge e
= find_fallthru_edge (edge_in
->dest
->preds
);
1874 force_nonfallthru (e
);
1877 /* Create the basic block note. */
1878 if (edge_in
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1879 before
= BB_HEAD (edge_in
->dest
);
1883 /* If this is a fall through edge to the exit block, the blocks might be
1884 not adjacent, and the right place is after the source. */
1885 if ((edge_in
->flags
& EDGE_FALLTHRU
)
1886 && edge_in
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1888 before
= NEXT_INSN (BB_END (edge_in
->src
));
1889 bb
= create_basic_block (before
, NULL
, edge_in
->src
);
1890 BB_COPY_PARTITION (bb
, edge_in
->src
);
1894 if (edge_in
->src
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1896 bb
= create_basic_block (before
, NULL
, edge_in
->dest
->prev_bb
);
1897 BB_COPY_PARTITION (bb
, edge_in
->dest
);
1901 basic_block after
= edge_in
->dest
->prev_bb
;
1902 /* If this is post-bb reordering, and the edge crosses a partition
1903 boundary, the new block needs to be inserted in the bb chain
1904 at the end of the src partition (since we put the new bb into
1905 that partition, see below). Otherwise we may end up creating
1906 an extra partition crossing in the chain, which is illegal.
1907 It can't go after the src, because src may have a fall-through
1908 to a different block. */
1909 if (crtl
->bb_reorder_complete
1910 && (edge_in
->flags
& EDGE_CROSSING
))
1912 after
= last_bb_in_partition (edge_in
->src
);
1913 before
= get_last_bb_insn (after
);
1914 /* The instruction following the last bb in partition should
1915 be a barrier, since it cannot end in a fall-through. */
1916 gcc_checking_assert (BARRIER_P (before
));
1917 before
= NEXT_INSN (before
);
1919 bb
= create_basic_block (before
, NULL
, after
);
1920 /* Put the split bb into the src partition, to avoid creating
1921 a situation where a cold bb dominates a hot bb, in the case
1922 where src is cold and dest is hot. The src will dominate
1923 the new bb (whereas it might not have dominated dest). */
1924 BB_COPY_PARTITION (bb
, edge_in
->src
);
1928 make_single_succ_edge (bb
, edge_in
->dest
, EDGE_FALLTHRU
);
1930 /* Can't allow a region crossing edge to be fallthrough. */
1931 if (BB_PARTITION (bb
) != BB_PARTITION (edge_in
->dest
)
1932 && edge_in
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
1934 new_bb
= force_nonfallthru (single_succ_edge (bb
));
1935 gcc_assert (!new_bb
);
1938 /* For non-fallthru edges, we must adjust the predecessor's
1939 jump instruction to target our new block. */
1940 if ((edge_in
->flags
& EDGE_FALLTHRU
) == 0)
1942 edge redirected
= redirect_edge_and_branch (edge_in
, bb
);
1943 gcc_assert (redirected
);
1947 if (edge_in
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
1949 /* For asm goto even splitting of fallthru edge might
1950 need insn patching, as other labels might point to the
1952 rtx_insn
*last
= BB_END (edge_in
->src
);
1955 && edge_in
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
1956 && (extract_asm_operands (PATTERN (last
))
1957 || JUMP_LABEL (last
) == before
)
1958 && patch_jump_insn (last
, before
, bb
))
1959 df_set_bb_dirty (edge_in
->src
);
1961 redirect_edge_succ (edge_in
, bb
);
1967 /* Queue instructions for insertion on an edge between two basic blocks.
1968 The new instructions and basic blocks (if any) will not appear in the
1969 CFG until commit_edge_insertions is called. */
1972 insert_insn_on_edge (rtx pattern
, edge e
)
1974 /* We cannot insert instructions on an abnormal critical edge.
1975 It will be easier to find the culprit if we die now. */
1976 gcc_assert (!((e
->flags
& EDGE_ABNORMAL
) && EDGE_CRITICAL_P (e
)));
1978 if (e
->insns
.r
== NULL_RTX
)
1981 push_to_sequence (e
->insns
.r
);
1983 emit_insn (pattern
);
1985 e
->insns
.r
= get_insns ();
1989 /* Update the CFG for the instructions queued on edge E. */
1992 commit_one_edge_insertion (edge e
)
1994 rtx_insn
*before
= NULL
, *after
= NULL
, *insns
, *tmp
, *last
;
1997 /* Pull the insns off the edge now since the edge might go away. */
2001 /* Figure out where to put these insns. If the destination has
2002 one predecessor, insert there. Except for the exit block. */
2003 if (single_pred_p (e
->dest
) && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2007 /* Get the location correct wrt a code label, and "nice" wrt
2008 a basic block note, and before everything else. */
2011 tmp
= NEXT_INSN (tmp
);
2012 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
2013 tmp
= NEXT_INSN (tmp
);
2014 if (tmp
== BB_HEAD (bb
))
2017 after
= PREV_INSN (tmp
);
2019 after
= get_last_insn ();
2022 /* If the source has one successor and the edge is not abnormal,
2023 insert there. Except for the entry block.
2024 Don't do this if the predecessor ends in a jump other than
2025 unconditional simple jump. E.g. for asm goto that points all
2026 its labels at the fallthru basic block, we can't insert instructions
2027 before the asm goto, as the asm goto can have various of side effects,
2028 and can't emit instructions after the asm goto, as it must end
2030 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
2031 && single_succ_p (e
->src
)
2032 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
2033 && (!JUMP_P (BB_END (e
->src
))
2034 || simplejump_p (BB_END (e
->src
))))
2038 /* It is possible to have a non-simple jump here. Consider a target
2039 where some forms of unconditional jumps clobber a register. This
2040 happens on the fr30 for example.
2042 We know this block has a single successor, so we can just emit
2043 the queued insns before the jump. */
2044 if (JUMP_P (BB_END (bb
)))
2045 before
= BB_END (bb
);
2048 /* We'd better be fallthru, or we've lost track of what's what. */
2049 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
2051 after
= BB_END (bb
);
2055 /* Otherwise we must split the edge. */
2058 bb
= split_edge (e
);
2060 /* If E crossed a partition boundary, we needed to make bb end in
2061 a region-crossing jump, even though it was originally fallthru. */
2062 if (JUMP_P (BB_END (bb
)))
2063 before
= BB_END (bb
);
2065 after
= BB_END (bb
);
2068 /* Now that we've found the spot, do the insertion. */
2071 emit_insn_before_noloc (insns
, before
, bb
);
2072 last
= prev_nonnote_insn (before
);
2075 last
= emit_insn_after_noloc (insns
, after
, bb
);
2077 if (returnjump_p (last
))
2079 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2080 This is not currently a problem because this only happens
2081 for the (single) epilogue, which already has a fallthru edge
2084 e
= single_succ_edge (bb
);
2085 gcc_assert (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
2086 && single_succ_p (bb
) && (e
->flags
& EDGE_FALLTHRU
));
2088 e
->flags
&= ~EDGE_FALLTHRU
;
2089 emit_barrier_after (last
);
2092 delete_insn (before
);
2095 gcc_assert (!JUMP_P (last
));
2098 /* Update the CFG for all queued instructions. */
2101 commit_edge_insertions (void)
2105 /* Optimization passes that invoke this routine can cause hot blocks
2106 previously reached by both hot and cold blocks to become dominated only
2107 by cold blocks. This will cause the verification below to fail,
2108 and lead to now cold code in the hot section. In some cases this
2109 may only be visible after newly unreachable blocks are deleted,
2110 which will be done by fixup_partitions. */
2111 fixup_partitions ();
2113 if (!currently_expanding_to_rtl
)
2114 checking_verify_flow_info ();
2116 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
),
2117 EXIT_BLOCK_PTR_FOR_FN (cfun
), next_bb
)
2122 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2125 if (currently_expanding_to_rtl
)
2126 rebuild_jump_labels_chain (e
->insns
.r
);
2127 commit_one_edge_insertion (e
);
2133 /* Print out RTL-specific basic block information (live information
2134 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2135 documented in dumpfile.h. */
2138 rtl_dump_bb (FILE *outf
, basic_block bb
, int indent
, dump_flags_t flags
)
2142 s_indent
= (char *) alloca ((size_t) indent
+ 1);
2143 memset (s_indent
, ' ', (size_t) indent
);
2144 s_indent
[indent
] = '\0';
2146 if (df
&& (flags
& TDF_DETAILS
))
2148 df_dump_top (bb
, outf
);
2152 if (bb
->index
!= ENTRY_BLOCK
&& bb
->index
!= EXIT_BLOCK
)
2154 rtx_insn
*last
= BB_END (bb
);
2156 last
= NEXT_INSN (last
);
2157 for (rtx_insn
*insn
= BB_HEAD (bb
); insn
!= last
; insn
= NEXT_INSN (insn
))
2159 if (flags
& TDF_DETAILS
)
2160 df_dump_insn_top (insn
, outf
);
2161 if (! (flags
& TDF_SLIM
))
2162 print_rtl_single (outf
, insn
);
2164 dump_insn_slim (outf
, insn
);
2165 if (flags
& TDF_DETAILS
)
2166 df_dump_insn_bottom (insn
, outf
);
2170 if (df
&& (flags
& TDF_DETAILS
))
2172 df_dump_bottom (bb
, outf
);
2178 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2179 for the start of each basic block. FLAGS are the TDF_* masks documented
2183 print_rtl_with_bb (FILE *outf
, const rtx_insn
*rtx_first
, dump_flags_t flags
)
2185 const rtx_insn
*tmp_rtx
;
2187 fprintf (outf
, "(nil)\n");
2190 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
2191 int max_uid
= get_max_uid ();
2192 basic_block
*start
= XCNEWVEC (basic_block
, max_uid
);
2193 basic_block
*end
= XCNEWVEC (basic_block
, max_uid
);
2194 enum bb_state
*in_bb_p
= XCNEWVEC (enum bb_state
, max_uid
);
2197 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2198 insns, but the CFG is not maintained so the basic block info
2199 is not reliable. Therefore it's omitted from the dumps. */
2200 if (! (cfun
->curr_properties
& PROP_cfg
))
2201 flags
&= ~TDF_BLOCKS
;
2204 df_dump_start (outf
);
2206 if (cfun
->curr_properties
& PROP_cfg
)
2208 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2212 start
[INSN_UID (BB_HEAD (bb
))] = bb
;
2213 end
[INSN_UID (BB_END (bb
))] = bb
;
2214 if (flags
& TDF_BLOCKS
)
2216 for (x
= BB_HEAD (bb
); x
!= NULL_RTX
; x
= NEXT_INSN (x
))
2218 enum bb_state state
= IN_MULTIPLE_BB
;
2220 if (in_bb_p
[INSN_UID (x
)] == NOT_IN_BB
)
2222 in_bb_p
[INSN_UID (x
)] = state
;
2224 if (x
== BB_END (bb
))
2231 for (tmp_rtx
= rtx_first
; tmp_rtx
!= NULL
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
2233 if (flags
& TDF_BLOCKS
)
2235 bb
= start
[INSN_UID (tmp_rtx
)];
2238 dump_bb_info (outf
, bb
, 0, dump_flags
, true, false);
2239 if (df
&& (flags
& TDF_DETAILS
))
2240 df_dump_top (bb
, outf
);
2243 if (in_bb_p
[INSN_UID (tmp_rtx
)] == NOT_IN_BB
2244 && !NOTE_P (tmp_rtx
)
2245 && !BARRIER_P (tmp_rtx
))
2246 fprintf (outf
, ";; Insn is not within a basic block\n");
2247 else if (in_bb_p
[INSN_UID (tmp_rtx
)] == IN_MULTIPLE_BB
)
2248 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
2251 if (flags
& TDF_DETAILS
)
2252 df_dump_insn_top (tmp_rtx
, outf
);
2253 if (! (flags
& TDF_SLIM
))
2254 print_rtl_single (outf
, tmp_rtx
);
2256 dump_insn_slim (outf
, tmp_rtx
);
2257 if (flags
& TDF_DETAILS
)
2258 df_dump_insn_bottom (tmp_rtx
, outf
);
2260 bb
= end
[INSN_UID (tmp_rtx
)];
2263 if (flags
& TDF_BLOCKS
)
2265 dump_bb_info (outf
, bb
, 0, dump_flags
, false, true);
2266 if (df
&& (flags
& TDF_DETAILS
))
2267 df_dump_bottom (bb
, outf
);
2270 /* Emit a hint if the fallthrough target of current basic block
2271 isn't the one placed right next. */
2272 else if (EDGE_COUNT (bb
->succs
) > 0)
2274 gcc_assert (BB_END (bb
) == tmp_rtx
);
2275 const rtx_insn
*ninsn
= NEXT_INSN (tmp_rtx
);
2276 /* Bypass intervening deleted-insn notes and debug insns. */
2278 && !NONDEBUG_INSN_P (ninsn
)
2279 && !start
[INSN_UID (ninsn
)])
2280 ninsn
= NEXT_INSN (ninsn
);
2281 edge e
= find_fallthru_edge (bb
->succs
);
2284 basic_block dest
= e
->dest
;
2285 if (start
[INSN_UID (ninsn
)] != dest
)
2286 fprintf (outf
, "%s ; pc falls through to BB %d\n",
2287 print_rtx_head
, dest
->index
);
2299 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2302 update_br_prob_note (basic_block bb
)
2305 note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
);
2306 if (!JUMP_P (BB_END (bb
)) || !BRANCH_EDGE (bb
)->probability
.initialized_p ())
2310 rtx
*note_link
, this_rtx
;
2312 note_link
= ®_NOTES (BB_END (bb
));
2313 for (this_rtx
= *note_link
; this_rtx
; this_rtx
= XEXP (this_rtx
, 1))
2314 if (this_rtx
== note
)
2316 *note_link
= XEXP (this_rtx
, 1);
2323 || XINT (note
, 0) == BRANCH_EDGE (bb
)->probability
.to_reg_br_prob_note ())
2325 XINT (note
, 0) = BRANCH_EDGE (bb
)->probability
.to_reg_br_prob_note ();
2328 /* Get the last insn associated with block BB (that includes barriers and
2329 tablejumps after BB). */
2331 get_last_bb_insn (basic_block bb
)
2333 rtx_jump_table_data
*table
;
2335 rtx_insn
*end
= BB_END (bb
);
2337 /* Include any jump table following the basic block. */
2338 if (tablejump_p (end
, NULL
, &table
))
2341 /* Include any barriers that may follow the basic block. */
2342 tmp
= next_nonnote_nondebug_insn_bb (end
);
2343 while (tmp
&& BARRIER_P (tmp
))
2346 tmp
= next_nonnote_nondebug_insn_bb (end
);
2352 /* Add all BBs reachable from entry via hot paths into the SET. */
2355 find_bbs_reachable_by_hot_paths (hash_set
<basic_block
> *set
)
2357 auto_vec
<basic_block
, 64> worklist
;
2359 set
->add (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2360 worklist
.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2362 while (worklist
.length () > 0)
2364 basic_block bb
= worklist
.pop ();
2368 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2369 if (BB_PARTITION (e
->dest
) != BB_COLD_PARTITION
2370 && !set
->add (e
->dest
))
2371 worklist
.safe_push (e
->dest
);
2375 /* Sanity check partition hotness to ensure that basic blocks in
2376 Â the cold partition don't dominate basic blocks in the hot partition.
2377 If FLAG_ONLY is true, report violations as errors. Otherwise
2378 re-mark the dominated blocks as cold, since this is run after
2379 cfg optimizations that may make hot blocks previously reached
2380 by both hot and cold blocks now only reachable along cold paths. */
2382 static vec
<basic_block
>
2383 find_partition_fixes (bool flag_only
)
2386 vec
<basic_block
> bbs_to_fix
= vNULL
;
2387 hash_set
<basic_block
> set
;
2389 /* Callers check this. */
2390 gcc_checking_assert (crtl
->has_bb_partition
);
2392 find_bbs_reachable_by_hot_paths (&set
);
2394 FOR_EACH_BB_FN (bb
, cfun
)
2395 if (!set
.contains (bb
)
2396 && BB_PARTITION (bb
) != BB_COLD_PARTITION
)
2399 error ("non-cold basic block %d reachable only "
2400 "by paths crossing the cold partition", bb
->index
);
2402 BB_SET_PARTITION (bb
, BB_COLD_PARTITION
);
2403 bbs_to_fix
.safe_push (bb
);
2409 /* Perform cleanup on the hot/cold bb partitioning after optimization
2410 passes that modify the cfg. */
2413 fixup_partitions (void)
2417 if (!crtl
->has_bb_partition
)
2420 /* Delete any blocks that became unreachable and weren't
2421 already cleaned up, for example during edge forwarding
2422 and convert_jumps_to_returns. This will expose more
2423 opportunities for fixing the partition boundaries here.
2424 Also, the calculation of the dominance graph during verification
2425 will assert if there are unreachable nodes. */
2426 delete_unreachable_blocks ();
2428 /* If there are partitions, do a sanity check on them: A basic block in
2429 a cold partition cannot dominate a basic block in a hot partition.
2430 Fixup any that now violate this requirement, as a result of edge
2431 forwarding and unreachable block deletion. */
2432 vec
<basic_block
> bbs_to_fix
= find_partition_fixes (false);
2434 /* Do the partition fixup after all necessary blocks have been converted to
2435 cold, so that we only update the region crossings the minimum number of
2436 places, which can require forcing edges to be non fallthru. */
2437 while (! bbs_to_fix
.is_empty ())
2439 bb
= bbs_to_fix
.pop ();
2440 fixup_new_cold_bb (bb
);
2444 /* Verify, in the basic block chain, that there is at most one switch
2445 between hot/cold partitions. This condition will not be true until
2446 after reorder_basic_blocks is called. */
2449 verify_hot_cold_block_grouping (void)
2453 bool switched_sections
= false;
2454 int current_partition
= BB_UNPARTITIONED
;
2456 /* Even after bb reordering is complete, we go into cfglayout mode
2457 again (in compgoto). Ensure we don't call this before going back
2458 into linearized RTL when any layout fixes would have been committed. */
2459 if (!crtl
->bb_reorder_complete
2460 || current_ir_type () != IR_RTL_CFGRTL
)
2463 FOR_EACH_BB_FN (bb
, cfun
)
2465 if (current_partition
!= BB_UNPARTITIONED
2466 && BB_PARTITION (bb
) != current_partition
)
2468 if (switched_sections
)
2470 error ("multiple hot/cold transitions found (bb %i)",
2475 switched_sections
= true;
2477 if (!crtl
->has_bb_partition
)
2478 error ("partition found but function partition flag not set");
2480 current_partition
= BB_PARTITION (bb
);
2487 /* Perform several checks on the edges out of each block, such as
2488 the consistency of the branch probabilities, the correctness
2489 of hot/cold partition crossing edges, and the number of expected
2490 successor edges. Also verify that the dominance relationship
2491 between hot/cold blocks is sane. */
2494 rtl_verify_edges (void)
2499 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2501 int n_fallthru
= 0, n_branch
= 0, n_abnormal_call
= 0, n_sibcall
= 0;
2502 int n_eh
= 0, n_abnormal
= 0;
2503 edge e
, fallthru
= NULL
;
2506 bool has_crossing_edge
= false;
2508 if (JUMP_P (BB_END (bb
))
2509 && (note
= find_reg_note (BB_END (bb
), REG_BR_PROB
, NULL_RTX
))
2510 && EDGE_COUNT (bb
->succs
) >= 2
2511 && any_condjump_p (BB_END (bb
)))
2513 if (!BRANCH_EDGE (bb
)->probability
.initialized_p ())
2515 if (profile_status_for_fn (cfun
) != PROFILE_ABSENT
)
2517 error ("verify_flow_info: "
2518 "REG_BR_PROB is set but cfg probability is not");
2522 else if (XINT (note
, 0)
2523 != BRANCH_EDGE (bb
)->probability
.to_reg_br_prob_note ()
2524 && profile_status_for_fn (cfun
) != PROFILE_ABSENT
)
2526 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2528 BRANCH_EDGE (bb
)->probability
.to_reg_br_prob_note ());
2533 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2537 if (e
->flags
& EDGE_FALLTHRU
)
2538 n_fallthru
++, fallthru
= e
;
2540 is_crossing
= (BB_PARTITION (e
->src
) != BB_PARTITION (e
->dest
)
2541 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
2542 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
));
2543 has_crossing_edge
|= is_crossing
;
2544 if (e
->flags
& EDGE_CROSSING
)
2548 error ("EDGE_CROSSING incorrectly set across same section");
2551 if (e
->flags
& EDGE_FALLTHRU
)
2553 error ("fallthru edge crosses section boundary in bb %i",
2557 if (e
->flags
& EDGE_EH
)
2559 error ("EH edge crosses section boundary in bb %i",
2563 if (JUMP_P (BB_END (bb
)) && !CROSSING_JUMP_P (BB_END (bb
)))
2565 error ("No region crossing jump at section boundary in bb %i",
2570 else if (is_crossing
)
2572 error ("EDGE_CROSSING missing across section boundary");
2576 if ((e
->flags
& ~(EDGE_DFS_BACK
2578 | EDGE_IRREDUCIBLE_LOOP
2581 | EDGE_PRESERVE
)) == 0)
2584 if (e
->flags
& EDGE_ABNORMAL_CALL
)
2587 if (e
->flags
& EDGE_SIBCALL
)
2590 if (e
->flags
& EDGE_EH
)
2593 if (e
->flags
& EDGE_ABNORMAL
)
2597 if (!has_crossing_edge
2598 && JUMP_P (BB_END (bb
))
2599 && CROSSING_JUMP_P (BB_END (bb
)))
2601 print_rtl_with_bb (stderr
, get_insns (), TDF_BLOCKS
| TDF_DETAILS
);
2602 error ("Region crossing jump across same section in bb %i",
2607 if (n_eh
&& !find_reg_note (BB_END (bb
), REG_EH_REGION
, NULL_RTX
))
2609 error ("missing REG_EH_REGION note at the end of bb %i", bb
->index
);
2614 error ("too many exception handling edges in bb %i", bb
->index
);
2618 && (!JUMP_P (BB_END (bb
))
2619 || (n_branch
> 1 && (any_uncondjump_p (BB_END (bb
))
2620 || any_condjump_p (BB_END (bb
))))))
2622 error ("too many outgoing branch edges from bb %i", bb
->index
);
2625 if (n_fallthru
&& any_uncondjump_p (BB_END (bb
)))
2627 error ("fallthru edge after unconditional jump in bb %i", bb
->index
);
2630 if (n_branch
!= 1 && any_uncondjump_p (BB_END (bb
)))
2632 error ("wrong number of branch edges after unconditional jump"
2633 " in bb %i", bb
->index
);
2636 if (n_branch
!= 1 && any_condjump_p (BB_END (bb
))
2637 && JUMP_LABEL (BB_END (bb
)) != BB_HEAD (fallthru
->dest
))
2639 error ("wrong amount of branch edges after conditional jump"
2640 " in bb %i", bb
->index
);
2643 if (n_abnormal_call
&& !CALL_P (BB_END (bb
)))
2645 error ("abnormal call edges for non-call insn in bb %i", bb
->index
);
2648 if (n_sibcall
&& !CALL_P (BB_END (bb
)))
2650 error ("sibcall edges for non-call insn in bb %i", bb
->index
);
2653 if (n_abnormal
> n_eh
2654 && !(CALL_P (BB_END (bb
))
2655 && n_abnormal
== n_abnormal_call
+ n_sibcall
)
2656 && (!JUMP_P (BB_END (bb
))
2657 || any_condjump_p (BB_END (bb
))
2658 || any_uncondjump_p (BB_END (bb
))))
2660 error ("abnormal edges for no purpose in bb %i", bb
->index
);
2665 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
2668 has_eh
= (e
->flags
& EDGE_EH
);
2669 if ((e
->flags
& EDGE_EH
) == has_eh
)
2671 error ("EH incoming edge mixed with non-EH incoming edges "
2672 "in bb %i", bb
->index
);
2678 /* If there are partitions, do a sanity check on them: A basic block in
2679 a cold partition cannot dominate a basic block in a hot partition. */
2680 if (crtl
->has_bb_partition
&& !err
2681 && current_ir_type () == IR_RTL_CFGLAYOUT
)
2683 vec
<basic_block
> bbs_to_fix
= find_partition_fixes (true);
2684 err
= !bbs_to_fix
.is_empty ();
2691 /* Checks on the instructions within blocks. Currently checks that each
2692 block starts with a basic block note, and that basic block notes and
2693 control flow jumps are not found in the middle of the block. */
2696 rtl_verify_bb_insns (void)
2702 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2704 /* Now check the header of basic
2705 block. It ought to contain optional CODE_LABEL followed
2706 by NOTE_BASIC_BLOCK. */
2710 if (BB_END (bb
) == x
)
2712 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2720 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
2722 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2727 if (BB_END (bb
) == x
)
2728 /* Do checks for empty blocks here. */
2731 for (x
= NEXT_INSN (x
); x
; x
= NEXT_INSN (x
))
2733 if (NOTE_INSN_BASIC_BLOCK_P (x
))
2735 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2736 INSN_UID (x
), bb
->index
);
2740 if (x
== BB_END (bb
))
2743 if (control_flow_insn_p (x
))
2745 error ("in basic block %d:", bb
->index
);
2746 fatal_insn ("flow control insn inside a basic block", x
);
2755 /* Verify that block pointers for instructions in basic blocks, headers and
2756 footers are set appropriately. */
2759 rtl_verify_bb_pointers (void)
2764 /* Check the general integrity of the basic blocks. */
2765 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2769 if (!(bb
->flags
& BB_RTL
))
2771 error ("BB_RTL flag not set for block %d", bb
->index
);
2775 FOR_BB_INSNS (bb
, insn
)
2776 if (BLOCK_FOR_INSN (insn
) != bb
)
2778 error ("insn %d basic block pointer is %d, should be %d",
2780 BLOCK_FOR_INSN (insn
) ? BLOCK_FOR_INSN (insn
)->index
: 0,
2785 for (insn
= BB_HEADER (bb
); insn
; insn
= NEXT_INSN (insn
))
2786 if (!BARRIER_P (insn
)
2787 && BLOCK_FOR_INSN (insn
) != NULL
)
2789 error ("insn %d in header of bb %d has non-NULL basic block",
2790 INSN_UID (insn
), bb
->index
);
2793 for (insn
= BB_FOOTER (bb
); insn
; insn
= NEXT_INSN (insn
))
2794 if (!BARRIER_P (insn
)
2795 && BLOCK_FOR_INSN (insn
) != NULL
)
2797 error ("insn %d in footer of bb %d has non-NULL basic block",
2798 INSN_UID (insn
), bb
->index
);
2807 /* Verify the CFG and RTL consistency common for both underlying RTL and
2810 Currently it does following checks:
2812 - overlapping of basic blocks
2813 - insns with wrong BLOCK_FOR_INSN pointers
2814 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2815 - tails of basic blocks (ensure that boundary is necessary)
2816 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2817 and NOTE_INSN_BASIC_BLOCK
2818 - verify that no fall_thru edge crosses hot/cold partition boundaries
2819 - verify that there are no pending RTL branch predictions
2820 - verify that hot blocks are not dominated by cold blocks
2822 In future it can be extended to check a lot of other stuff as well
2823 (reachability of basic blocks, life information, etc. etc.). */
2826 rtl_verify_flow_info_1 (void)
2830 err
|= rtl_verify_bb_pointers ();
2832 err
|= rtl_verify_bb_insns ();
2834 err
|= rtl_verify_edges ();
2839 /* Walk the instruction chain and verify that bb head/end pointers
2840 are correct, and that instructions are in exactly one bb and have
2841 correct block pointers. */
2844 rtl_verify_bb_insn_chain (void)
2849 rtx_insn
*last_head
= get_last_insn ();
2850 basic_block
*bb_info
;
2851 const int max_uid
= get_max_uid ();
2853 bb_info
= XCNEWVEC (basic_block
, max_uid
);
2855 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2857 rtx_insn
*head
= BB_HEAD (bb
);
2858 rtx_insn
*end
= BB_END (bb
);
2860 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2862 /* Verify the end of the basic block is in the INSN chain. */
2866 /* And that the code outside of basic blocks has NULL bb field. */
2868 && BLOCK_FOR_INSN (x
) != NULL
)
2870 error ("insn %d outside of basic blocks has non-NULL bb field",
2878 error ("end insn %d for block %d not found in the insn stream",
2879 INSN_UID (end
), bb
->index
);
2883 /* Work backwards from the end to the head of the basic block
2884 to verify the head is in the RTL chain. */
2885 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2887 /* While walking over the insn chain, verify insns appear
2888 in only one basic block. */
2889 if (bb_info
[INSN_UID (x
)] != NULL
)
2891 error ("insn %d is in multiple basic blocks (%d and %d)",
2892 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
2896 bb_info
[INSN_UID (x
)] = bb
;
2903 error ("head insn %d for block %d not found in the insn stream",
2904 INSN_UID (head
), bb
->index
);
2908 last_head
= PREV_INSN (x
);
2911 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
2913 /* Check that the code before the first basic block has NULL
2916 && BLOCK_FOR_INSN (x
) != NULL
)
2918 error ("insn %d outside of basic blocks has non-NULL bb field",
2928 /* Verify that fallthru edges point to adjacent blocks in layout order and
2929 that barriers exist after non-fallthru blocks. */
2932 rtl_verify_fallthru (void)
2937 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2941 e
= find_fallthru_edge (bb
->succs
);
2946 /* Ensure existence of barrier in BB with no fallthru edges. */
2947 for (insn
= NEXT_INSN (BB_END (bb
)); ; insn
= NEXT_INSN (insn
))
2949 if (!insn
|| NOTE_INSN_BASIC_BLOCK_P (insn
))
2951 error ("missing barrier after block %i", bb
->index
);
2955 if (BARRIER_P (insn
))
2959 else if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
2960 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2964 if (e
->src
->next_bb
!= e
->dest
)
2967 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2968 e
->src
->index
, e
->dest
->index
);
2972 for (insn
= NEXT_INSN (BB_END (e
->src
)); insn
!= BB_HEAD (e
->dest
);
2973 insn
= NEXT_INSN (insn
))
2974 if (BARRIER_P (insn
) || NONDEBUG_INSN_P (insn
))
2976 error ("verify_flow_info: Incorrect fallthru %i->%i",
2977 e
->src
->index
, e
->dest
->index
);
2978 fatal_insn ("wrong insn in the fallthru edge", insn
);
2987 /* Verify that blocks are laid out in consecutive order. While walking the
2988 instructions, verify that all expected instructions are inside the basic
2989 blocks, and that all returns are followed by barriers. */
2992 rtl_verify_bb_layout (void)
2998 rtx_insn
* const rtx_first
= get_insns ();
2999 basic_block last_bb_seen
= ENTRY_BLOCK_PTR_FOR_FN (cfun
), curr_bb
= NULL
;
3003 for (x
= rtx_first
; x
; x
= NEXT_INSN (x
))
3005 if (NOTE_INSN_BASIC_BLOCK_P (x
))
3007 bb
= NOTE_BASIC_BLOCK (x
);
3010 if (bb
!= last_bb_seen
->next_bb
)
3011 internal_error ("basic blocks not laid down consecutively");
3013 curr_bb
= last_bb_seen
= bb
;
3018 switch (GET_CODE (x
))
3025 /* An ADDR_VEC is placed outside any basic block. */
3027 && JUMP_TABLE_DATA_P (NEXT_INSN (x
)))
3030 /* But in any case, non-deletable labels can appear anywhere. */
3034 fatal_insn ("insn outside basic block", x
);
3039 && returnjump_p (x
) && ! condjump_p (x
)
3040 && ! ((y
= next_nonnote_nondebug_insn (x
))
3042 fatal_insn ("return not followed by barrier", x
);
3044 if (curr_bb
&& x
== BB_END (curr_bb
))
3048 if (num_bb_notes
!= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
)
3050 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3051 num_bb_notes
, n_basic_blocks_for_fn (cfun
));
3056 /* Verify the CFG and RTL consistency common for both underlying RTL and
3057 cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3059 Currently it does following checks:
3060 - all checks of rtl_verify_flow_info_1
3061 - test head/end pointers
3062 - check that blocks are laid out in consecutive order
3063 - check that all insns are in the basic blocks
3064 (except the switch handling code, barriers and notes)
3065 - check that all returns are followed by barriers
3066 - check that all fallthru edge points to the adjacent blocks
3067 - verify that there is a single hot/cold partition boundary after bbro */
3070 rtl_verify_flow_info (void)
3074 err
|= rtl_verify_flow_info_1 ();
3076 err
|= rtl_verify_bb_insn_chain ();
3078 err
|= rtl_verify_fallthru ();
3080 err
|= rtl_verify_bb_layout ();
3082 err
|= verify_hot_cold_block_grouping ();
3087 /* Assume that the preceding pass has possibly eliminated jump instructions
3088 or converted the unconditional jumps. Eliminate the edges from CFG.
3089 Return true if any edges are eliminated. */
3092 purge_dead_edges (basic_block bb
)
3095 rtx_insn
*insn
= BB_END (bb
);
3097 bool purged
= false;
3101 if ((DEBUG_INSN_P (insn
) || NOTE_P (insn
)) && insn
!= BB_HEAD (bb
))
3103 insn
= PREV_INSN (insn
);
3104 while ((DEBUG_INSN_P (insn
) || NOTE_P (insn
)) && insn
!= BB_HEAD (bb
));
3106 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3107 if (NONJUMP_INSN_P (insn
)
3108 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
)))
3112 if (! may_trap_p (PATTERN (insn
))
3113 || ((eqnote
= find_reg_equal_equiv_note (insn
))
3114 && ! may_trap_p (XEXP (eqnote
, 0))))
3115 remove_note (insn
, note
);
3118 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3119 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3121 bool remove
= false;
3123 /* There are three types of edges we need to handle correctly here: EH
3124 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3125 latter can appear when nonlocal gotos are used. */
3126 if (e
->flags
& EDGE_ABNORMAL_CALL
)
3130 else if (can_nonlocal_goto (insn
))
3132 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
3134 else if (flag_tm
&& find_reg_note (insn
, REG_TM
, NULL
))
3139 else if (e
->flags
& EDGE_EH
)
3140 remove
= !can_throw_internal (insn
);
3145 df_set_bb_dirty (bb
);
3158 /* We do care only about conditional jumps and simplejumps. */
3159 if (!any_condjump_p (insn
)
3160 && !returnjump_p (insn
)
3161 && !simplejump_p (insn
))
3164 /* Branch probability/prediction notes are defined only for
3165 condjumps. We've possibly turned condjump into simplejump. */
3166 if (simplejump_p (insn
))
3168 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
3170 remove_note (insn
, note
);
3171 while ((note
= find_reg_note (insn
, REG_BR_PRED
, NULL
)))
3172 remove_note (insn
, note
);
3175 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3177 /* Avoid abnormal flags to leak from computed jumps turned
3178 into simplejumps. */
3180 e
->flags
&= ~EDGE_ABNORMAL
;
3182 /* See if this edge is one we should keep. */
3183 if ((e
->flags
& EDGE_FALLTHRU
) && any_condjump_p (insn
))
3184 /* A conditional jump can fall through into the next
3185 block, so we should keep the edge. */
3190 else if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
3191 && BB_HEAD (e
->dest
) == JUMP_LABEL (insn
))
3192 /* If the destination block is the target of the jump,
3198 else if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
3199 && returnjump_p (insn
))
3200 /* If the destination block is the exit block, and this
3201 instruction is a return, then keep the edge. */
3206 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
3207 /* Keep the edges that correspond to exceptions thrown by
3208 this instruction and rematerialize the EDGE_ABNORMAL
3209 flag we just cleared above. */
3211 e
->flags
|= EDGE_ABNORMAL
;
3216 /* We do not need this edge. */
3217 df_set_bb_dirty (bb
);
3222 if (EDGE_COUNT (bb
->succs
) == 0 || !purged
)
3226 fprintf (dump_file
, "Purged edges from bb %i\n", bb
->index
);
3231 /* Redistribute probabilities. */
3232 if (single_succ_p (bb
))
3234 single_succ_edge (bb
)->probability
= profile_probability::always ();
3238 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
3242 b
= BRANCH_EDGE (bb
);
3243 f
= FALLTHRU_EDGE (bb
);
3244 b
->probability
= profile_probability::from_reg_br_prob_note
3246 f
->probability
= b
->probability
.invert ();
3251 else if (CALL_P (insn
) && SIBLING_CALL_P (insn
))
3253 /* First, there should not be any EH or ABCALL edges resulting
3254 from non-local gotos and the like. If there were, we shouldn't
3255 have created the sibcall in the first place. Second, there
3256 should of course never have been a fallthru edge. */
3257 gcc_assert (single_succ_p (bb
));
3258 gcc_assert (single_succ_edge (bb
)->flags
3259 == (EDGE_SIBCALL
| EDGE_ABNORMAL
));
3264 /* If we don't see a jump insn, we don't know exactly why the block would
3265 have been broken at this point. Look for a simple, non-fallthru edge,
3266 as these are only created by conditional branches. If we find such an
3267 edge we know that there used to be a jump here and can then safely
3268 remove all non-fallthru edges. */
3270 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3271 if (! (e
->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
)))
3280 /* Remove all but the fake and fallthru edges. The fake edge may be
3281 the only successor for this block in the case of noreturn
3283 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3285 if (!(e
->flags
& (EDGE_FALLTHRU
| EDGE_FAKE
)))
3287 df_set_bb_dirty (bb
);
3295 gcc_assert (single_succ_p (bb
));
3297 single_succ_edge (bb
)->probability
= profile_probability::always ();
3300 fprintf (dump_file
, "Purged non-fallthru edges from bb %i\n",
3305 /* Search all basic blocks for potentially dead edges and purge them. Return
3306 true if some edge has been eliminated. */
3309 purge_all_dead_edges (void)
3314 FOR_EACH_BB_FN (bb
, cfun
)
3316 bool purged_here
= purge_dead_edges (bb
);
3318 purged
|= purged_here
;
3324 /* This is used by a few passes that emit some instructions after abnormal
3325 calls, moving the basic block's end, while they in fact do want to emit
3326 them on the fallthru edge. Look for abnormal call edges, find backward
3327 the call in the block and insert the instructions on the edge instead.
3329 Similarly, handle instructions throwing exceptions internally.
3331 Return true when instructions have been found and inserted on edges. */
3334 fixup_abnormal_edges (void)
3336 bool inserted
= false;
3339 FOR_EACH_BB_FN (bb
, cfun
)
3344 /* Look for cases we are interested in - calls or instructions causing
3346 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3347 if ((e
->flags
& EDGE_ABNORMAL_CALL
)
3348 || ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
3349 == (EDGE_ABNORMAL
| EDGE_EH
)))
3352 if (e
&& !CALL_P (BB_END (bb
)) && !can_throw_internal (BB_END (bb
)))
3356 /* Get past the new insns generated. Allow notes, as the insns
3357 may be already deleted. */
3359 while ((NONJUMP_INSN_P (insn
) || NOTE_P (insn
))
3360 && !can_throw_internal (insn
)
3361 && insn
!= BB_HEAD (bb
))
3362 insn
= PREV_INSN (insn
);
3364 if (CALL_P (insn
) || can_throw_internal (insn
))
3366 rtx_insn
*stop
, *next
;
3368 e
= find_fallthru_edge (bb
->succs
);
3370 stop
= NEXT_INSN (BB_END (bb
));
3373 for (insn
= NEXT_INSN (insn
); insn
!= stop
; insn
= next
)
3375 next
= NEXT_INSN (insn
);
3380 /* Sometimes there's still the return value USE.
3381 If it's placed after a trapping call (i.e. that
3382 call is the last insn anyway), we have no fallthru
3383 edge. Simply delete this use and don't try to insert
3384 on the non-existent edge.
3385 Similarly, sometimes a call that can throw is
3386 followed in the source with __builtin_unreachable (),
3387 meaning that there is UB if the call returns rather
3388 than throws. If there weren't any instructions
3389 following such calls before, supposedly even the ones
3390 we've deleted aren't significant and can be
3394 /* We're not deleting it, we're moving it. */
3395 insn
->set_undeleted ();
3396 SET_PREV_INSN (insn
) = NULL_RTX
;
3397 SET_NEXT_INSN (insn
) = NULL_RTX
;
3399 insert_insn_on_edge (insn
, e
);
3403 else if (!BARRIER_P (insn
))
3404 set_block_for_insn (insn
, NULL
);
3408 /* It may be that we don't find any trapping insn. In this
3409 case we discovered quite late that the insn that had been
3410 marked as can_throw_internal in fact couldn't trap at all.
3411 So we should in fact delete the EH edges out of the block. */
3413 purge_dead_edges (bb
);
3420 /* Cut the insns from FIRST to LAST out of the insns stream. */
3423 unlink_insn_chain (rtx_insn
*first
, rtx_insn
*last
)
3425 rtx_insn
*prevfirst
= PREV_INSN (first
);
3426 rtx_insn
*nextlast
= NEXT_INSN (last
);
3428 SET_PREV_INSN (first
) = NULL
;
3429 SET_NEXT_INSN (last
) = NULL
;
3431 SET_NEXT_INSN (prevfirst
) = nextlast
;
3433 SET_PREV_INSN (nextlast
) = prevfirst
;
3435 set_last_insn (prevfirst
);
3437 set_first_insn (nextlast
);
3441 /* Skip over inter-block insns occurring after BB which are typically
3442 associated with BB (e.g., barriers). If there are any such insns,
3443 we return the last one. Otherwise, we return the end of BB. */
3446 skip_insns_after_block (basic_block bb
)
3448 rtx_insn
*insn
, *last_insn
, *next_head
, *prev
;
3451 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3452 next_head
= BB_HEAD (bb
->next_bb
);
3454 for (last_insn
= insn
= BB_END (bb
); (insn
= NEXT_INSN (insn
)) != 0; )
3456 if (insn
== next_head
)
3459 switch (GET_CODE (insn
))
3466 switch (NOTE_KIND (insn
))
3468 case NOTE_INSN_BLOCK_END
:
3478 if (NEXT_INSN (insn
)
3479 && JUMP_TABLE_DATA_P (NEXT_INSN (insn
)))
3481 insn
= NEXT_INSN (insn
);
3494 /* It is possible to hit contradictory sequence. For instance:
3500 Where barrier belongs to jump_insn, but the note does not. This can be
3501 created by removing the basic block originally following
3502 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3504 for (insn
= last_insn
; insn
!= BB_END (bb
); insn
= prev
)
3506 prev
= PREV_INSN (insn
);
3508 switch (NOTE_KIND (insn
))
3510 case NOTE_INSN_BLOCK_END
:
3513 case NOTE_INSN_DELETED
:
3514 case NOTE_INSN_DELETED_LABEL
:
3515 case NOTE_INSN_DELETED_DEBUG_LABEL
:
3518 reorder_insns (insn
, insn
, last_insn
);
3525 /* Locate or create a label for a given basic block. */
3528 label_for_bb (basic_block bb
)
3530 rtx_insn
*label
= BB_HEAD (bb
);
3532 if (!LABEL_P (label
))
3535 fprintf (dump_file
, "Emitting label for block %d\n", bb
->index
);
3537 label
= block_label (bb
);
3543 /* Locate the effective beginning and end of the insn chain for each
3544 block, as defined by skip_insns_after_block above. */
3547 record_effective_endpoints (void)
3549 rtx_insn
*next_insn
;
3553 for (insn
= get_insns ();
3556 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
;
3557 insn
= NEXT_INSN (insn
))
3559 /* No basic blocks at all? */
3562 if (PREV_INSN (insn
))
3563 cfg_layout_function_header
=
3564 unlink_insn_chain (get_insns (), PREV_INSN (insn
));
3566 cfg_layout_function_header
= NULL
;
3568 next_insn
= get_insns ();
3569 FOR_EACH_BB_FN (bb
, cfun
)
3573 if (PREV_INSN (BB_HEAD (bb
)) && next_insn
!= BB_HEAD (bb
))
3574 BB_HEADER (bb
) = unlink_insn_chain (next_insn
,
3575 PREV_INSN (BB_HEAD (bb
)));
3576 end
= skip_insns_after_block (bb
);
3577 if (NEXT_INSN (BB_END (bb
)) && BB_END (bb
) != end
)
3578 BB_FOOTER (bb
) = unlink_insn_chain (NEXT_INSN (BB_END (bb
)), end
);
3579 next_insn
= NEXT_INSN (BB_END (bb
));
3582 cfg_layout_function_footer
= next_insn
;
3583 if (cfg_layout_function_footer
)
3584 cfg_layout_function_footer
= unlink_insn_chain (cfg_layout_function_footer
, get_last_insn ());
3589 const pass_data pass_data_into_cfg_layout_mode
=
3591 RTL_PASS
, /* type */
3592 "into_cfglayout", /* name */
3593 OPTGROUP_NONE
, /* optinfo_flags */
3595 0, /* properties_required */
3596 PROP_cfglayout
, /* properties_provided */
3597 0, /* properties_destroyed */
3598 0, /* todo_flags_start */
3599 0, /* todo_flags_finish */
3602 class pass_into_cfg_layout_mode
: public rtl_opt_pass
3605 pass_into_cfg_layout_mode (gcc::context
*ctxt
)
3606 : rtl_opt_pass (pass_data_into_cfg_layout_mode
, ctxt
)
3609 /* opt_pass methods: */
3610 virtual unsigned int execute (function
*)
3612 cfg_layout_initialize (0);
3616 }; // class pass_into_cfg_layout_mode
3621 make_pass_into_cfg_layout_mode (gcc::context
*ctxt
)
3623 return new pass_into_cfg_layout_mode (ctxt
);
3628 const pass_data pass_data_outof_cfg_layout_mode
=
3630 RTL_PASS
, /* type */
3631 "outof_cfglayout", /* name */
3632 OPTGROUP_NONE
, /* optinfo_flags */
3634 0, /* properties_required */
3635 0, /* properties_provided */
3636 PROP_cfglayout
, /* properties_destroyed */
3637 0, /* todo_flags_start */
3638 0, /* todo_flags_finish */
3641 class pass_outof_cfg_layout_mode
: public rtl_opt_pass
3644 pass_outof_cfg_layout_mode (gcc::context
*ctxt
)
3645 : rtl_opt_pass (pass_data_outof_cfg_layout_mode
, ctxt
)
3648 /* opt_pass methods: */
3649 virtual unsigned int execute (function
*);
3651 }; // class pass_outof_cfg_layout_mode
3654 pass_outof_cfg_layout_mode::execute (function
*fun
)
3658 FOR_EACH_BB_FN (bb
, fun
)
3659 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (fun
))
3660 bb
->aux
= bb
->next_bb
;
3662 cfg_layout_finalize ();
3670 make_pass_outof_cfg_layout_mode (gcc::context
*ctxt
)
3672 return new pass_outof_cfg_layout_mode (ctxt
);
3676 /* Link the basic blocks in the correct order, compacting the basic
3677 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3678 function also clears the basic block header and footer fields.
3680 This function is usually called after a pass (e.g. tracer) finishes
3681 some transformations while in cfglayout mode. The required sequence
3682 of the basic blocks is in a linked list along the bb->aux field.
3683 This functions re-links the basic block prev_bb and next_bb pointers
3684 accordingly, and it compacts and renumbers the blocks.
3686 FIXME: This currently works only for RTL, but the only RTL-specific
3687 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3688 to GIMPLE a long time ago, but it doesn't relink the basic block
3689 chain. It could do that (to give better initial RTL) if this function
3690 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3693 relink_block_chain (bool stay_in_cfglayout_mode
)
3695 basic_block bb
, prev_bb
;
3698 /* Maybe dump the re-ordered sequence. */
3701 fprintf (dump_file
, "Reordered sequence:\n");
3702 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
, index
=
3705 bb
= (basic_block
) bb
->aux
, index
++)
3707 fprintf (dump_file
, " %i ", index
);
3708 if (get_bb_original (bb
))
3709 fprintf (dump_file
, "duplicate of %i\n",
3710 get_bb_original (bb
)->index
);
3711 else if (forwarder_block_p (bb
)
3712 && !LABEL_P (BB_HEAD (bb
)))
3713 fprintf (dump_file
, "compensation\n");
3715 fprintf (dump_file
, "bb %i\n", bb
->index
);
3719 /* Now reorder the blocks. */
3720 prev_bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
3721 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
3722 for (; bb
; prev_bb
= bb
, bb
= (basic_block
) bb
->aux
)
3724 bb
->prev_bb
= prev_bb
;
3725 prev_bb
->next_bb
= bb
;
3727 prev_bb
->next_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
);
3728 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
= prev_bb
;
3730 /* Then, clean up the aux fields. */
3731 FOR_ALL_BB_FN (bb
, cfun
)
3734 if (!stay_in_cfglayout_mode
)
3735 BB_HEADER (bb
) = BB_FOOTER (bb
) = NULL
;
3738 /* Maybe reset the original copy tables, they are not valid anymore
3739 when we renumber the basic blocks in compact_blocks. If we
3740 are going out of cfglayout mode, don't re-allocate the tables. */
3741 if (original_copy_tables_initialized_p ())
3742 free_original_copy_tables ();
3743 if (stay_in_cfglayout_mode
)
3744 initialize_original_copy_tables ();
3746 /* Finally, put basic_block_info in the new order. */
3751 /* Given a reorder chain, rearrange the code to match. */
3754 fixup_reorder_chain (void)
3757 rtx_insn
*insn
= NULL
;
3759 if (cfg_layout_function_header
)
3761 set_first_insn (cfg_layout_function_header
);
3762 insn
= cfg_layout_function_header
;
3763 while (NEXT_INSN (insn
))
3764 insn
= NEXT_INSN (insn
);
3767 /* First do the bulk reordering -- rechain the blocks without regard to
3768 the needed changes to jumps and labels. */
3770 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= (basic_block
)
3776 SET_NEXT_INSN (insn
) = BB_HEADER (bb
);
3778 set_first_insn (BB_HEADER (bb
));
3779 SET_PREV_INSN (BB_HEADER (bb
)) = insn
;
3780 insn
= BB_HEADER (bb
);
3781 while (NEXT_INSN (insn
))
3782 insn
= NEXT_INSN (insn
);
3785 SET_NEXT_INSN (insn
) = BB_HEAD (bb
);
3787 set_first_insn (BB_HEAD (bb
));
3788 SET_PREV_INSN (BB_HEAD (bb
)) = insn
;
3792 SET_NEXT_INSN (insn
) = BB_FOOTER (bb
);
3793 SET_PREV_INSN (BB_FOOTER (bb
)) = insn
;
3794 while (NEXT_INSN (insn
))
3795 insn
= NEXT_INSN (insn
);
3799 SET_NEXT_INSN (insn
) = cfg_layout_function_footer
;
3800 if (cfg_layout_function_footer
)
3801 SET_PREV_INSN (cfg_layout_function_footer
) = insn
;
3803 while (NEXT_INSN (insn
))
3804 insn
= NEXT_INSN (insn
);
3806 set_last_insn (insn
);
3808 verify_insn_chain ();
3810 /* Now add jumps and labels as needed to match the blocks new
3813 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= (basic_block
)
3816 edge e_fall
, e_taken
, e
;
3817 rtx_insn
*bb_end_insn
;
3818 rtx ret_label
= NULL_RTX
;
3822 if (EDGE_COUNT (bb
->succs
) == 0)
3825 /* Find the old fallthru edge, and another non-EH edge for
3827 e_taken
= e_fall
= NULL
;
3829 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3830 if (e
->flags
& EDGE_FALLTHRU
)
3832 else if (! (e
->flags
& EDGE_EH
))
3835 bb_end_insn
= BB_END (bb
);
3836 if (rtx_jump_insn
*bb_end_jump
= dyn_cast
<rtx_jump_insn
*> (bb_end_insn
))
3838 ret_label
= JUMP_LABEL (bb_end_jump
);
3839 if (any_condjump_p (bb_end_jump
))
3841 /* This might happen if the conditional jump has side
3842 effects and could therefore not be optimized away.
3843 Make the basic block to end with a barrier in order
3844 to prevent rtl_verify_flow_info from complaining. */
3847 gcc_assert (!onlyjump_p (bb_end_jump
)
3848 || returnjump_p (bb_end_jump
)
3849 || (e_taken
->flags
& EDGE_CROSSING
));
3850 emit_barrier_after (bb_end_jump
);
3854 /* If the old fallthru is still next, nothing to do. */
3855 if (bb
->aux
== e_fall
->dest
3856 || e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3859 /* The degenerated case of conditional jump jumping to the next
3860 instruction can happen for jumps with side effects. We need
3861 to construct a forwarder block and this will be done just
3862 fine by force_nonfallthru below. */
3866 /* There is another special case: if *neither* block is next,
3867 such as happens at the very end of a function, then we'll
3868 need to add a new unconditional jump. Choose the taken
3869 edge based on known or assumed probability. */
3870 else if (bb
->aux
!= e_taken
->dest
)
3872 rtx note
= find_reg_note (bb_end_jump
, REG_BR_PROB
, 0);
3875 && profile_probability::from_reg_br_prob_note
3876 (XINT (note
, 0)) < profile_probability::even ()
3877 && invert_jump (bb_end_jump
,
3879 == EXIT_BLOCK_PTR_FOR_FN (cfun
)
3881 : label_for_bb (e_fall
->dest
)), 0))
3883 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3884 gcc_checking_assert (could_fall_through
3885 (e_taken
->src
, e_taken
->dest
));
3886 e_taken
->flags
|= EDGE_FALLTHRU
;
3887 update_br_prob_note (bb
);
3888 e
= e_fall
, e_fall
= e_taken
, e_taken
= e
;
3892 /* If the "jumping" edge is a crossing edge, and the fall
3893 through edge is non-crossing, leave things as they are. */
3894 else if ((e_taken
->flags
& EDGE_CROSSING
)
3895 && !(e_fall
->flags
& EDGE_CROSSING
))
3898 /* Otherwise we can try to invert the jump. This will
3899 basically never fail, however, keep up the pretense. */
3900 else if (invert_jump (bb_end_jump
,
3902 == EXIT_BLOCK_PTR_FOR_FN (cfun
)
3904 : label_for_bb (e_fall
->dest
)), 0))
3906 e_fall
->flags
&= ~EDGE_FALLTHRU
;
3907 gcc_checking_assert (could_fall_through
3908 (e_taken
->src
, e_taken
->dest
));
3909 e_taken
->flags
|= EDGE_FALLTHRU
;
3910 update_br_prob_note (bb
);
3911 if (LABEL_NUSES (ret_label
) == 0
3912 && single_pred_p (e_taken
->dest
))
3913 delete_insn (as_a
<rtx_insn
*> (ret_label
));
3917 else if (extract_asm_operands (PATTERN (bb_end_insn
)) != NULL
)
3919 /* If the old fallthru is still next or if
3920 asm goto doesn't have a fallthru (e.g. when followed by
3921 __builtin_unreachable ()), nothing to do. */
3923 || bb
->aux
== e_fall
->dest
3924 || e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3927 /* Otherwise we'll have to use the fallthru fixup below. */
3931 /* Otherwise we have some return, switch or computed
3932 jump. In the 99% case, there should not have been a
3934 gcc_assert (returnjump_p (bb_end_insn
) || !e_fall
);
3940 /* No fallthru implies a noreturn function with EH edges, or
3941 something similarly bizarre. In any case, we don't need to
3946 /* If the fallthru block is still next, nothing to do. */
3947 if (bb
->aux
== e_fall
->dest
)
3950 /* A fallthru to exit block. */
3951 if (e_fall
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
3955 /* We got here if we need to add a new jump insn.
3956 Note force_nonfallthru can delete E_FALL and thus we have to
3957 save E_FALL->src prior to the call to force_nonfallthru. */
3958 nb
= force_nonfallthru_and_redirect (e_fall
, e_fall
->dest
, ret_label
);
3963 /* Don't process this new block. */
3968 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3970 /* Annoying special case - jump around dead jumptables left in the code. */
3971 FOR_EACH_BB_FN (bb
, cfun
)
3973 edge e
= find_fallthru_edge (bb
->succs
);
3975 if (e
&& !can_fallthru (e
->src
, e
->dest
))
3976 force_nonfallthru (e
);
3979 /* Ensure goto_locus from edges has some instructions with that locus in RTL
3980 when not optimizing. */
3981 if (!optimize
&& !DECL_IGNORED_P (current_function_decl
))
3982 FOR_EACH_BB_FN (bb
, cfun
)
3987 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3988 if (LOCATION_LOCUS (e
->goto_locus
) != UNKNOWN_LOCATION
3989 && !(e
->flags
& EDGE_ABNORMAL
))
3993 basic_block dest
, nb
;
3996 insn
= BB_END (e
->src
);
3997 end
= PREV_INSN (BB_HEAD (e
->src
));
3999 && (!NONDEBUG_INSN_P (insn
) || !INSN_HAS_LOCATION (insn
)))
4000 insn
= PREV_INSN (insn
);
4002 && INSN_LOCATION (insn
) == e
->goto_locus
)
4004 if (simplejump_p (BB_END (e
->src
))
4005 && !INSN_HAS_LOCATION (BB_END (e
->src
)))
4007 INSN_LOCATION (BB_END (e
->src
)) = e
->goto_locus
;
4011 if (dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4013 /* Non-fallthru edges to the exit block cannot be split. */
4014 if (!(e
->flags
& EDGE_FALLTHRU
))
4019 insn
= BB_HEAD (dest
);
4020 end
= NEXT_INSN (BB_END (dest
));
4021 while (insn
!= end
&& !NONDEBUG_INSN_P (insn
))
4022 insn
= NEXT_INSN (insn
);
4023 if (insn
!= end
&& INSN_HAS_LOCATION (insn
)
4024 && INSN_LOCATION (insn
) == e
->goto_locus
)
4027 nb
= split_edge (e
);
4028 if (!INSN_P (BB_END (nb
)))
4029 BB_END (nb
) = emit_insn_after_noloc (gen_nop (), BB_END (nb
),
4031 INSN_LOCATION (BB_END (nb
)) = e
->goto_locus
;
4033 /* If there are other incoming edges to the destination block
4034 with the same goto locus, redirect them to the new block as
4035 well, this can prevent other such blocks from being created
4036 in subsequent iterations of the loop. */
4037 for (ei2
= ei_start (dest
->preds
); (e2
= ei_safe_edge (ei2
)); )
4038 if (LOCATION_LOCUS (e2
->goto_locus
) != UNKNOWN_LOCATION
4039 && !(e2
->flags
& (EDGE_ABNORMAL
| EDGE_FALLTHRU
))
4040 && e
->goto_locus
== e2
->goto_locus
)
4041 redirect_edge_and_branch (e2
, nb
);
4048 /* Perform sanity checks on the insn chain.
4049 1. Check that next/prev pointers are consistent in both the forward and
4051 2. Count insns in chain, going both directions, and check if equal.
4052 3. Check that get_last_insn () returns the actual end of chain. */
4055 verify_insn_chain (void)
4057 rtx_insn
*x
, *prevx
, *nextx
;
4058 int insn_cnt1
, insn_cnt2
;
4060 for (prevx
= NULL
, insn_cnt1
= 1, x
= get_insns ();
4062 prevx
= x
, insn_cnt1
++, x
= NEXT_INSN (x
))
4063 gcc_assert (PREV_INSN (x
) == prevx
);
4065 gcc_assert (prevx
== get_last_insn ());
4067 for (nextx
= NULL
, insn_cnt2
= 1, x
= get_last_insn ();
4069 nextx
= x
, insn_cnt2
++, x
= PREV_INSN (x
))
4070 gcc_assert (NEXT_INSN (x
) == nextx
);
4072 gcc_assert (insn_cnt1
== insn_cnt2
);
4075 /* If we have assembler epilogues, the block falling through to exit must
4076 be the last one in the reordered chain when we reach final. Ensure
4077 that this condition is met. */
4079 fixup_fallthru_exit_predecessor (void)
4082 basic_block bb
= NULL
;
4084 /* This transformation is not valid before reload, because we might
4085 separate a call from the instruction that copies the return
4087 gcc_assert (reload_completed
);
4089 e
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
4095 basic_block c
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
4097 /* If the very first block is the one with the fall-through exit
4098 edge, we have to split that block. */
4101 bb
= split_block_after_labels (bb
)->dest
;
4104 BB_FOOTER (bb
) = BB_FOOTER (c
);
4105 BB_FOOTER (c
) = NULL
;
4108 while (c
->aux
!= bb
)
4109 c
= (basic_block
) c
->aux
;
4113 c
= (basic_block
) c
->aux
;
4120 /* In case there are more than one fallthru predecessors of exit, force that
4121 there is only one. */
4124 force_one_exit_fallthru (void)
4126 edge e
, predecessor
= NULL
;
4129 basic_block forwarder
, bb
;
4131 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4132 if (e
->flags
& EDGE_FALLTHRU
)
4134 if (predecessor
== NULL
)
4146 /* Exit has several fallthru predecessors. Create a forwarder block for
4148 forwarder
= split_edge (predecessor
);
4149 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
4150 (e
= ei_safe_edge (ei
)); )
4152 if (e
->src
== forwarder
4153 || !(e
->flags
& EDGE_FALLTHRU
))
4156 redirect_edge_and_branch_force (e
, forwarder
);
4159 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4161 FOR_EACH_BB_FN (bb
, cfun
)
4163 if (bb
->aux
== NULL
&& bb
!= forwarder
)
4165 bb
->aux
= forwarder
;
4171 /* Return true in case it is possible to duplicate the basic block BB. */
4174 cfg_layout_can_duplicate_bb_p (const_basic_block bb
)
4176 /* Do not attempt to duplicate tablejumps, as we need to unshare
4177 the dispatch table. This is difficult to do, as the instructions
4178 computing jump destination may be hoisted outside the basic block. */
4179 if (tablejump_p (BB_END (bb
), NULL
, NULL
))
4182 /* Do not duplicate blocks containing insns that can't be copied. */
4183 if (targetm
.cannot_copy_insn_p
)
4185 rtx_insn
*insn
= BB_HEAD (bb
);
4188 if (INSN_P (insn
) && targetm
.cannot_copy_insn_p (insn
))
4190 if (insn
== BB_END (bb
))
4192 insn
= NEXT_INSN (insn
);
4200 duplicate_insn_chain (rtx_insn
*from
, rtx_insn
*to
,
4201 class loop
*loop
, copy_bb_data
*id
)
4203 rtx_insn
*insn
, *next
, *copy
;
4206 /* Avoid updating of boundaries of previous basic block. The
4207 note will get removed from insn stream in fixup. */
4208 last
= emit_note (NOTE_INSN_DELETED
);
4210 /* Create copy at the end of INSN chain. The chain will
4211 be reordered later. */
4212 for (insn
= from
; insn
!= NEXT_INSN (to
); insn
= NEXT_INSN (insn
))
4214 switch (GET_CODE (insn
))
4217 /* Don't duplicate label debug insns. */
4218 if (DEBUG_BIND_INSN_P (insn
)
4219 && TREE_CODE (INSN_VAR_LOCATION_DECL (insn
)) == LABEL_DECL
)
4225 copy
= emit_copy_of_insn_after (insn
, get_last_insn ());
4226 if (JUMP_P (insn
) && JUMP_LABEL (insn
) != NULL_RTX
4227 && ANY_RETURN_P (JUMP_LABEL (insn
)))
4228 JUMP_LABEL (copy
) = JUMP_LABEL (insn
);
4229 maybe_copy_prologue_epilogue_insn (insn
, copy
);
4230 /* If requested remap dependence info of cliques brought in
4234 subrtx_iterator::array_type array
;
4235 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), ALL
)
4236 if (MEM_P (*iter
) && MEM_EXPR (*iter
))
4238 tree op
= MEM_EXPR (*iter
);
4239 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
4240 op
= TREE_OPERAND (op
, 0);
4241 while (handled_component_p (op
))
4242 op
= TREE_OPERAND (op
, 0);
4243 if ((TREE_CODE (op
) == MEM_REF
4244 || TREE_CODE (op
) == TARGET_MEM_REF
)
4245 && MR_DEPENDENCE_CLIQUE (op
) > 1
4247 || (MR_DEPENDENCE_CLIQUE (op
)
4248 != loop
->owned_clique
)))
4250 if (!id
->dependence_map
)
4251 id
->dependence_map
= new hash_map
<dependence_hash
,
4254 unsigned short &newc
= id
->dependence_map
->get_or_insert
4255 (MR_DEPENDENCE_CLIQUE (op
), &existed
);
4259 (MR_DEPENDENCE_CLIQUE (op
) <= cfun
->last_clique
);
4260 newc
= ++cfun
->last_clique
;
4262 /* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4263 since MEM_EXPR is shared so make a copy and
4264 walk to the subtree again. */
4265 tree new_expr
= unshare_expr (MEM_EXPR (*iter
));
4266 if (TREE_CODE (new_expr
) == WITH_SIZE_EXPR
)
4267 new_expr
= TREE_OPERAND (new_expr
, 0);
4268 while (handled_component_p (new_expr
))
4269 new_expr
= TREE_OPERAND (new_expr
, 0);
4270 MR_DEPENDENCE_CLIQUE (new_expr
) = newc
;
4271 set_mem_expr (const_cast <rtx
> (*iter
), new_expr
);
4277 case JUMP_TABLE_DATA
:
4278 /* Avoid copying of dispatch tables. We never duplicate
4279 tablejumps, so this can hit only in case the table got
4280 moved far from original jump.
4281 Avoid copying following barrier as well if any
4282 (and debug insns in between). */
4283 for (next
= NEXT_INSN (insn
);
4284 next
!= NEXT_INSN (to
);
4285 next
= NEXT_INSN (next
))
4286 if (!DEBUG_INSN_P (next
))
4288 if (next
!= NEXT_INSN (to
) && BARRIER_P (next
))
4300 switch (NOTE_KIND (insn
))
4302 /* In case prologue is empty and function contain label
4303 in first BB, we may want to copy the block. */
4304 case NOTE_INSN_PROLOGUE_END
:
4306 case NOTE_INSN_DELETED
:
4307 case NOTE_INSN_DELETED_LABEL
:
4308 case NOTE_INSN_DELETED_DEBUG_LABEL
:
4309 /* No problem to strip these. */
4310 case NOTE_INSN_FUNCTION_BEG
:
4311 /* There is always just single entry to function. */
4312 case NOTE_INSN_BASIC_BLOCK
:
4313 /* We should only switch text sections once. */
4314 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4317 case NOTE_INSN_EPILOGUE_BEG
:
4318 case NOTE_INSN_UPDATE_SJLJ_CONTEXT
:
4319 emit_note_copy (as_a
<rtx_note
*> (insn
));
4323 /* All other notes should have already been eliminated. */
4331 insn
= NEXT_INSN (last
);
4336 /* Create a duplicate of the basic block BB. */
4339 cfg_layout_duplicate_bb (basic_block bb
, copy_bb_data
*id
)
4344 class loop
*loop
= (id
&& current_loops
) ? bb
->loop_father
: NULL
;
4346 insn
= duplicate_insn_chain (BB_HEAD (bb
), BB_END (bb
), loop
, id
);
4347 new_bb
= create_basic_block (insn
,
4348 insn
? get_last_insn () : NULL
,
4349 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
4351 BB_COPY_PARTITION (new_bb
, bb
);
4354 insn
= BB_HEADER (bb
);
4355 while (NEXT_INSN (insn
))
4356 insn
= NEXT_INSN (insn
);
4357 insn
= duplicate_insn_chain (BB_HEADER (bb
), insn
, loop
, id
);
4359 BB_HEADER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
4364 insn
= BB_FOOTER (bb
);
4365 while (NEXT_INSN (insn
))
4366 insn
= NEXT_INSN (insn
);
4367 insn
= duplicate_insn_chain (BB_FOOTER (bb
), insn
, loop
, id
);
4369 BB_FOOTER (new_bb
) = unlink_insn_chain (insn
, get_last_insn ());
4376 /* Main entry point to this module - initialize the datastructures for
4377 CFG layout changes. It keeps LOOPS up-to-date if not null.
4379 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4382 cfg_layout_initialize (int flags
)
4387 /* Once bb partitioning is complete, cfg layout mode should not be
4388 re-entered. Entering cfg layout mode may require fixups. As an
4389 example, if edge forwarding performed when optimizing the cfg
4390 layout required moving a block from the hot to the cold
4391 section. This would create an illegal partitioning unless some
4392 manual fixup was performed. */
4393 gcc_assert (!crtl
->bb_reorder_complete
|| !crtl
->has_bb_partition
);
4395 initialize_original_copy_tables ();
4397 cfg_layout_rtl_register_cfg_hooks ();
4399 record_effective_endpoints ();
4401 /* Make sure that the targets of non local gotos are marked. */
4402 for (x
= nonlocal_goto_handler_labels
; x
; x
= x
->next ())
4404 bb
= BLOCK_FOR_INSN (x
->insn ());
4405 bb
->flags
|= BB_NON_LOCAL_GOTO_TARGET
;
4408 cleanup_cfg (CLEANUP_CFGLAYOUT
| flags
);
4411 /* Splits superblocks. */
4413 break_superblocks (void)
4418 auto_sbitmap
superblocks (last_basic_block_for_fn (cfun
));
4419 bitmap_clear (superblocks
);
4421 FOR_EACH_BB_FN (bb
, cfun
)
4422 if (bb
->flags
& BB_SUPERBLOCK
)
4424 bb
->flags
&= ~BB_SUPERBLOCK
;
4425 bitmap_set_bit (superblocks
, bb
->index
);
4431 rebuild_jump_labels (get_insns ());
4432 find_many_sub_basic_blocks (superblocks
);
4436 /* Finalize the changes: reorder insn list according to the sequence specified
4437 by aux pointers, enter compensation code, rebuild scope forest. */
4440 cfg_layout_finalize (void)
4442 free_dominance_info (CDI_DOMINATORS
);
4443 force_one_exit_fallthru ();
4444 rtl_register_cfg_hooks ();
4445 if (reload_completed
&& !targetm
.have_epilogue ())
4446 fixup_fallthru_exit_predecessor ();
4447 fixup_reorder_chain ();
4449 rebuild_jump_labels (get_insns ());
4450 delete_dead_jumptables ();
4453 verify_insn_chain ();
4454 checking_verify_flow_info ();
4458 /* Same as split_block but update cfg_layout structures. */
4461 cfg_layout_split_block (basic_block bb
, void *insnp
)
4463 rtx insn
= (rtx
) insnp
;
4464 basic_block new_bb
= rtl_split_block (bb
, insn
);
4466 BB_FOOTER (new_bb
) = BB_FOOTER (bb
);
4467 BB_FOOTER (bb
) = NULL
;
4472 /* Redirect Edge to DEST. */
4474 cfg_layout_redirect_edge_and_branch (edge e
, basic_block dest
)
4476 basic_block src
= e
->src
;
4479 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
4482 if (e
->dest
== dest
)
4485 if (e
->flags
& EDGE_CROSSING
4486 && BB_PARTITION (e
->src
) == BB_PARTITION (dest
)
4487 && simplejump_p (BB_END (src
)))
4491 "Removing crossing jump while redirecting edge form %i to %i\n",
4492 e
->src
->index
, dest
->index
);
4493 delete_insn (BB_END (src
));
4494 remove_barriers_from_footer (src
);
4495 e
->flags
|= EDGE_FALLTHRU
;
4498 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4499 && (ret
= try_redirect_by_replacing_jump (e
, dest
, true)))
4501 df_set_bb_dirty (src
);
4505 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4506 && (e
->flags
& EDGE_FALLTHRU
) && !(e
->flags
& EDGE_COMPLEX
))
4509 fprintf (dump_file
, "Redirecting entry edge from bb %i to %i\n",
4510 e
->src
->index
, dest
->index
);
4512 df_set_bb_dirty (e
->src
);
4513 redirect_edge_succ (e
, dest
);
4517 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4518 in the case the basic block appears to be in sequence. Avoid this
4521 if (e
->flags
& EDGE_FALLTHRU
)
4523 /* Redirect any branch edges unified with the fallthru one. */
4524 if (JUMP_P (BB_END (src
))
4525 && label_is_jump_target_p (BB_HEAD (e
->dest
),
4531 fprintf (dump_file
, "Fallthru edge unified with branch "
4532 "%i->%i redirected to %i\n",
4533 e
->src
->index
, e
->dest
->index
, dest
->index
);
4534 e
->flags
&= ~EDGE_FALLTHRU
;
4535 redirected
= redirect_branch_edge (e
, dest
);
4536 gcc_assert (redirected
);
4537 redirected
->flags
|= EDGE_FALLTHRU
;
4538 df_set_bb_dirty (redirected
->src
);
4541 /* In case we are redirecting fallthru edge to the branch edge
4542 of conditional jump, remove it. */
4543 if (EDGE_COUNT (src
->succs
) == 2)
4545 /* Find the edge that is different from E. */
4546 edge s
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
);
4549 && any_condjump_p (BB_END (src
))
4550 && onlyjump_p (BB_END (src
)))
4551 delete_insn (BB_END (src
));
4554 fprintf (dump_file
, "Redirecting fallthru edge %i->%i to %i\n",
4555 e
->src
->index
, e
->dest
->index
, dest
->index
);
4556 ret
= redirect_edge_succ_nodup (e
, dest
);
4559 ret
= redirect_branch_edge (e
, dest
);
4564 fixup_partition_crossing (ret
);
4565 /* We don't want simplejumps in the insn stream during cfglayout. */
4566 gcc_assert (!simplejump_p (BB_END (src
)) || CROSSING_JUMP_P (BB_END (src
)));
4568 df_set_bb_dirty (src
);
4572 /* Simple wrapper as we always can redirect fallthru edges. */
4574 cfg_layout_redirect_edge_and_branch_force (edge e
, basic_block dest
)
4576 edge redirected
= cfg_layout_redirect_edge_and_branch (e
, dest
);
4578 gcc_assert (redirected
);
4582 /* Same as delete_basic_block but update cfg_layout structures. */
4585 cfg_layout_delete_block (basic_block bb
)
4587 rtx_insn
*insn
, *next
, *prev
= PREV_INSN (BB_HEAD (bb
)), *remaints
;
4592 next
= BB_HEAD (bb
);
4594 SET_NEXT_INSN (prev
) = BB_HEADER (bb
);
4596 set_first_insn (BB_HEADER (bb
));
4597 SET_PREV_INSN (BB_HEADER (bb
)) = prev
;
4598 insn
= BB_HEADER (bb
);
4599 while (NEXT_INSN (insn
))
4600 insn
= NEXT_INSN (insn
);
4601 SET_NEXT_INSN (insn
) = next
;
4602 SET_PREV_INSN (next
) = insn
;
4604 next
= NEXT_INSN (BB_END (bb
));
4607 insn
= BB_FOOTER (bb
);
4610 if (BARRIER_P (insn
))
4612 if (PREV_INSN (insn
))
4613 SET_NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
4615 BB_FOOTER (bb
) = NEXT_INSN (insn
);
4616 if (NEXT_INSN (insn
))
4617 SET_PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
4621 insn
= NEXT_INSN (insn
);
4626 SET_NEXT_INSN (insn
) = BB_FOOTER (bb
);
4627 SET_PREV_INSN (BB_FOOTER (bb
)) = insn
;
4628 while (NEXT_INSN (insn
))
4629 insn
= NEXT_INSN (insn
);
4630 SET_NEXT_INSN (insn
) = next
;
4632 SET_PREV_INSN (next
) = insn
;
4634 set_last_insn (insn
);
4637 if (bb
->next_bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
4638 to
= &BB_HEADER (bb
->next_bb
);
4640 to
= &cfg_layout_function_footer
;
4642 rtl_delete_block (bb
);
4645 prev
= NEXT_INSN (prev
);
4647 prev
= get_insns ();
4649 next
= PREV_INSN (next
);
4651 next
= get_last_insn ();
4653 if (next
&& NEXT_INSN (next
) != prev
)
4655 remaints
= unlink_insn_chain (prev
, next
);
4657 while (NEXT_INSN (insn
))
4658 insn
= NEXT_INSN (insn
);
4659 SET_NEXT_INSN (insn
) = *to
;
4661 SET_PREV_INSN (*to
) = insn
;
4666 /* Return true when blocks A and B can be safely merged. */
4669 cfg_layout_can_merge_blocks_p (basic_block a
, basic_block b
)
4671 /* If we are partitioning hot/cold basic blocks, we don't want to
4672 mess up unconditional or indirect jumps that cross between hot
4675 Basic block partitioning may result in some jumps that appear to
4676 be optimizable (or blocks that appear to be mergeable), but which really
4677 must be left untouched (they are required to make it safely across
4678 partition boundaries). See the comments at the top of
4679 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4681 if (BB_PARTITION (a
) != BB_PARTITION (b
))
4684 /* Protect the loop latches. */
4685 if (current_loops
&& b
->loop_father
->latch
== b
)
4688 /* If we would end up moving B's instructions, make sure it doesn't fall
4689 through into the exit block, since we cannot recover from a fallthrough
4690 edge into the exit block occurring in the middle of a function. */
4691 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4693 edge e
= find_fallthru_edge (b
->succs
);
4694 if (e
&& e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4698 /* There must be exactly one edge in between the blocks. */
4699 return (single_succ_p (a
)
4700 && single_succ (a
) == b
4701 && single_pred_p (b
) == 1
4703 /* Must be simple edge. */
4704 && !(single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
4705 && a
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4706 && b
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
4707 /* If the jump insn has side effects, we can't kill the edge.
4708 When not optimizing, try_redirect_by_replacing_jump will
4709 not allow us to redirect an edge by replacing a table jump. */
4710 && (!JUMP_P (BB_END (a
))
4711 || ((!optimize
|| reload_completed
)
4712 ? simplejump_p (BB_END (a
)) : onlyjump_p (BB_END (a
)))));
4715 /* Merge block A and B. The blocks must be mergeable. */
4718 cfg_layout_merge_blocks (basic_block a
, basic_block b
)
4720 /* If B is a forwarder block whose outgoing edge has no location, we'll
4721 propagate the locus of the edge between A and B onto it. */
4722 const bool forward_edge_locus
4723 = (b
->flags
& BB_FORWARDER_BLOCK
) != 0
4724 && LOCATION_LOCUS (EDGE_SUCC (b
, 0)->goto_locus
) == UNKNOWN_LOCATION
;
4727 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a
, b
));
4730 fprintf (dump_file
, "Merging block %d into block %d...\n", b
->index
,
4733 /* If there was a CODE_LABEL beginning B, delete it. */
4734 if (LABEL_P (BB_HEAD (b
)))
4736 delete_insn (BB_HEAD (b
));
4739 /* We should have fallthru edge in a, or we can do dummy redirection to get
4741 if (JUMP_P (BB_END (a
)))
4742 try_redirect_by_replacing_jump (EDGE_SUCC (a
, 0), b
, true);
4743 gcc_assert (!JUMP_P (BB_END (a
)));
4745 /* If not optimizing, preserve the locus of the single edge between
4746 blocks A and B if necessary by emitting a nop. */
4748 && !forward_edge_locus
4749 && !DECL_IGNORED_P (current_function_decl
))
4750 emit_nop_for_unique_locus_between (a
, b
);
4752 /* Move things from b->footer after a->footer. */
4756 BB_FOOTER (a
) = BB_FOOTER (b
);
4759 rtx_insn
*last
= BB_FOOTER (a
);
4761 while (NEXT_INSN (last
))
4762 last
= NEXT_INSN (last
);
4763 SET_NEXT_INSN (last
) = BB_FOOTER (b
);
4764 SET_PREV_INSN (BB_FOOTER (b
)) = last
;
4766 BB_FOOTER (b
) = NULL
;
4769 /* Move things from b->header before a->footer.
4770 Note that this may include dead tablejump data, but we don't clean
4771 those up until we go out of cfglayout mode. */
4774 if (! BB_FOOTER (a
))
4775 BB_FOOTER (a
) = BB_HEADER (b
);
4778 rtx_insn
*last
= BB_HEADER (b
);
4780 while (NEXT_INSN (last
))
4781 last
= NEXT_INSN (last
);
4782 SET_NEXT_INSN (last
) = BB_FOOTER (a
);
4783 SET_PREV_INSN (BB_FOOTER (a
)) = last
;
4784 BB_FOOTER (a
) = BB_HEADER (b
);
4786 BB_HEADER (b
) = NULL
;
4789 /* In the case basic blocks are not adjacent, move them around. */
4790 if (NEXT_INSN (BB_END (a
)) != BB_HEAD (b
))
4792 insn
= unlink_insn_chain (BB_HEAD (b
), BB_END (b
));
4794 emit_insn_after_noloc (insn
, BB_END (a
), a
);
4796 /* Otherwise just re-associate the instructions. */
4800 BB_END (a
) = BB_END (b
);
4803 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4804 We need to explicitly call. */
4805 update_bb_for_insn_chain (insn
, BB_END (b
), a
);
4807 /* Skip possible DELETED_LABEL insn. */
4808 if (!NOTE_INSN_BASIC_BLOCK_P (insn
))
4809 insn
= NEXT_INSN (insn
);
4810 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn
));
4811 BB_HEAD (b
) = BB_END (b
) = NULL
;
4814 df_bb_delete (b
->index
);
4816 if (forward_edge_locus
)
4817 EDGE_SUCC (b
, 0)->goto_locus
= EDGE_SUCC (a
, 0)->goto_locus
;
4820 fprintf (dump_file
, "Merged blocks %d and %d.\n", a
->index
, b
->index
);
4826 cfg_layout_split_edge (edge e
)
4828 basic_block new_bb
=
4829 create_basic_block (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4830 ? NEXT_INSN (BB_END (e
->src
)) : get_insns (),
4833 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4834 BB_COPY_PARTITION (new_bb
, e
->src
);
4836 BB_COPY_PARTITION (new_bb
, e
->dest
);
4837 make_edge (new_bb
, e
->dest
, EDGE_FALLTHRU
);
4838 redirect_edge_and_branch_force (e
, new_bb
);
4843 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4846 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED
)
4850 /* Return true if BB contains only labels or non-executable
4854 rtl_block_empty_p (basic_block bb
)
4858 if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
4859 || bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
4862 FOR_BB_INSNS (bb
, insn
)
4863 if (NONDEBUG_INSN_P (insn
) && !any_uncondjump_p (insn
))
4869 /* Split a basic block if it ends with a conditional branch and if
4870 the other part of the block is not empty. */
4873 rtl_split_block_before_cond_jump (basic_block bb
)
4876 rtx_insn
*split_point
= NULL
;
4877 rtx_insn
*last
= NULL
;
4878 bool found_code
= false;
4880 FOR_BB_INSNS (bb
, insn
)
4882 if (any_condjump_p (insn
))
4884 else if (NONDEBUG_INSN_P (insn
))
4889 /* Did not find everything. */
4890 if (found_code
&& split_point
)
4891 return split_block (bb
, split_point
)->dest
;
4896 /* Return 1 if BB ends with a call, possibly followed by some
4897 instructions that must stay with the call, 0 otherwise. */
4900 rtl_block_ends_with_call_p (basic_block bb
)
4902 rtx_insn
*insn
= BB_END (bb
);
4904 while (!CALL_P (insn
)
4905 && insn
!= BB_HEAD (bb
)
4906 && (keep_with_call_p (insn
)
4908 || DEBUG_INSN_P (insn
)))
4909 insn
= PREV_INSN (insn
);
4910 return (CALL_P (insn
));
4913 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4916 rtl_block_ends_with_condjump_p (const_basic_block bb
)
4918 return any_condjump_p (BB_END (bb
));
4921 /* Return true if we need to add fake edge to exit.
4922 Helper function for rtl_flow_call_edges_add. */
4925 need_fake_edge_p (const rtx_insn
*insn
)
4931 && !SIBLING_CALL_P (insn
)
4932 && !find_reg_note (insn
, REG_NORETURN
, NULL
)
4933 && !(RTL_CONST_OR_PURE_CALL_P (insn
))))
4936 return ((GET_CODE (PATTERN (insn
)) == ASM_OPERANDS
4937 && MEM_VOLATILE_P (PATTERN (insn
)))
4938 || (GET_CODE (PATTERN (insn
)) == PARALLEL
4939 && asm_noperands (insn
) != -1
4940 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn
), 0, 0)))
4941 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
4944 /* Add fake edges to the function exit for any non constant and non noreturn
4945 calls, volatile inline assembly in the bitmap of blocks specified by
4946 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4949 The goal is to expose cases in which entering a basic block does not imply
4950 that all subsequent instructions must be executed. */
4953 rtl_flow_call_edges_add (sbitmap blocks
)
4956 int blocks_split
= 0;
4957 int last_bb
= last_basic_block_for_fn (cfun
);
4958 bool check_last_block
= false;
4960 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
4964 check_last_block
= true;
4966 check_last_block
= bitmap_bit_p (blocks
,
4967 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
4969 /* In the last basic block, before epilogue generation, there will be
4970 a fallthru edge to EXIT. Special care is required if the last insn
4971 of the last basic block is a call because make_edge folds duplicate
4972 edges, which would result in the fallthru edge also being marked
4973 fake, which would result in the fallthru edge being removed by
4974 remove_fake_edges, which would result in an invalid CFG.
4976 Moreover, we can't elide the outgoing fake edge, since the block
4977 profiler needs to take this into account in order to solve the minimal
4978 spanning tree in the case that the call doesn't return.
4980 Handle this by adding a dummy instruction in a new last basic block. */
4981 if (check_last_block
)
4983 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
4984 rtx_insn
*insn
= BB_END (bb
);
4986 /* Back up past insns that must be kept in the same block as a call. */
4987 while (insn
!= BB_HEAD (bb
)
4988 && keep_with_call_p (insn
))
4989 insn
= PREV_INSN (insn
);
4991 if (need_fake_edge_p (insn
))
4995 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
4998 insert_insn_on_edge (gen_use (const0_rtx
), e
);
4999 commit_edge_insertions ();
5004 /* Now add fake edges to the function exit for any non constant
5005 calls since there is no way that we can determine if they will
5008 for (i
= NUM_FIXED_BLOCKS
; i
< last_bb
; i
++)
5010 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
5012 rtx_insn
*prev_insn
;
5017 if (blocks
&& !bitmap_bit_p (blocks
, i
))
5020 for (insn
= BB_END (bb
); ; insn
= prev_insn
)
5022 prev_insn
= PREV_INSN (insn
);
5023 if (need_fake_edge_p (insn
))
5026 rtx_insn
*split_at_insn
= insn
;
5028 /* Don't split the block between a call and an insn that should
5029 remain in the same block as the call. */
5031 while (split_at_insn
!= BB_END (bb
)
5032 && keep_with_call_p (NEXT_INSN (split_at_insn
)))
5033 split_at_insn
= NEXT_INSN (split_at_insn
);
5035 /* The handling above of the final block before the epilogue
5036 should be enough to verify that there is no edge to the exit
5037 block in CFG already. Calling make_edge in such case would
5038 cause us to mark that edge as fake and remove it later. */
5040 if (flag_checking
&& split_at_insn
== BB_END (bb
))
5042 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
5043 gcc_assert (e
== NULL
);
5046 /* Note that the following may create a new basic block
5047 and renumber the existing basic blocks. */
5048 if (split_at_insn
!= BB_END (bb
))
5050 e
= split_block (bb
, split_at_insn
);
5055 edge ne
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
5056 ne
->probability
= profile_probability::guessed_never ();
5059 if (insn
== BB_HEAD (bb
))
5065 verify_flow_info ();
5067 return blocks_split
;
5070 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
5071 the conditional branch target, SECOND_HEAD should be the fall-thru
5072 there is no need to handle this here the loop versioning code handles
5073 this. the reason for SECON_HEAD is that it is needed for condition
5074 in trees, and this should be of the same type since it is a hook. */
5076 rtl_lv_add_condition_to_bb (basic_block first_head
,
5077 basic_block second_head ATTRIBUTE_UNUSED
,
5078 basic_block cond_bb
, void *comp_rtx
)
5080 rtx_code_label
*label
;
5081 rtx_insn
*seq
, *jump
;
5082 rtx op0
= XEXP ((rtx
)comp_rtx
, 0);
5083 rtx op1
= XEXP ((rtx
)comp_rtx
, 1);
5084 enum rtx_code comp
= GET_CODE ((rtx
)comp_rtx
);
5088 label
= block_label (first_head
);
5089 mode
= GET_MODE (op0
);
5090 if (mode
== VOIDmode
)
5091 mode
= GET_MODE (op1
);
5094 op0
= force_operand (op0
, NULL_RTX
);
5095 op1
= force_operand (op1
, NULL_RTX
);
5096 do_compare_rtx_and_jump (op0
, op1
, comp
, 0, mode
, NULL_RTX
, NULL
, label
,
5097 profile_probability::uninitialized ());
5098 jump
= get_last_insn ();
5099 JUMP_LABEL (jump
) = label
;
5100 LABEL_NUSES (label
)++;
5104 /* Add the new cond, in the new head. */
5105 emit_insn_after (seq
, BB_END (cond_bb
));
5109 /* Given a block B with unconditional branch at its end, get the
5110 store the return the branch edge and the fall-thru edge in
5111 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
5113 rtl_extract_cond_bb_edges (basic_block b
, edge
*branch_edge
,
5114 edge
*fallthru_edge
)
5116 edge e
= EDGE_SUCC (b
, 0);
5118 if (e
->flags
& EDGE_FALLTHRU
)
5121 *branch_edge
= EDGE_SUCC (b
, 1);
5126 *fallthru_edge
= EDGE_SUCC (b
, 1);
5131 init_rtl_bb_info (basic_block bb
)
5133 gcc_assert (!bb
->il
.x
.rtl
);
5134 bb
->il
.x
.head_
= NULL
;
5135 bb
->il
.x
.rtl
= ggc_cleared_alloc
<rtl_bb_info
> ();
5138 /* Returns true if it is possible to remove edge E by redirecting
5139 it to the destination of the other edge from E->src. */
5142 rtl_can_remove_branch_p (const_edge e
)
5144 const_basic_block src
= e
->src
;
5145 const_basic_block target
= EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
;
5146 const rtx_insn
*insn
= BB_END (src
);
5149 /* The conditions are taken from try_redirect_by_replacing_jump. */
5150 if (target
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
5153 if (e
->flags
& (EDGE_ABNORMAL_CALL
| EDGE_EH
))
5156 if (BB_PARTITION (src
) != BB_PARTITION (target
))
5159 if (!onlyjump_p (insn
)
5160 || tablejump_p (insn
, NULL
, NULL
))
5163 set
= single_set (insn
);
5164 if (!set
|| side_effects_p (set
))
5171 rtl_duplicate_bb (basic_block bb
, copy_bb_data
*id
)
5173 bb
= cfg_layout_duplicate_bb (bb
, id
);
5178 /* Do book-keeping of basic block BB for the profile consistency checker.
5179 Store the counting in RECORD. */
5181 rtl_account_profile_record (basic_block bb
, struct profile_record
*record
)
5184 FOR_BB_INSNS (bb
, insn
)
5187 record
->size
+= insn_cost (insn
, false);
5188 if (bb
->count
.initialized_p ())
5190 += insn_cost (insn
, true) * bb
->count
.to_gcov_type ();
5191 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
5193 += insn_cost (insn
, true) * bb
->count
.to_frequency (cfun
);
5197 /* Implementation of CFG manipulation for linearized RTL. */
5198 struct cfg_hooks rtl_cfg_hooks
= {
5200 rtl_verify_flow_info
,
5202 rtl_dump_bb_for_graph
,
5203 rtl_create_basic_block
,
5204 rtl_redirect_edge_and_branch
,
5205 rtl_redirect_edge_and_branch_force
,
5206 rtl_can_remove_branch_p
,
5209 rtl_move_block_after
,
5210 rtl_can_merge_blocks
, /* can_merge_blocks_p */
5214 cfg_layout_can_duplicate_bb_p
,
5217 rtl_make_forwarder_block
,
5218 rtl_tidy_fallthru_edge
,
5219 rtl_force_nonfallthru
,
5220 rtl_block_ends_with_call_p
,
5221 rtl_block_ends_with_condjump_p
,
5222 rtl_flow_call_edges_add
,
5223 NULL
, /* execute_on_growing_pred */
5224 NULL
, /* execute_on_shrinking_pred */
5225 NULL
, /* duplicate loop for trees */
5226 NULL
, /* lv_add_condition_to_bb */
5227 NULL
, /* lv_adjust_loop_header_phi*/
5228 NULL
, /* extract_cond_bb_edges */
5229 NULL
, /* flush_pending_stmts */
5230 rtl_block_empty_p
, /* block_empty_p */
5231 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
5232 rtl_account_profile_record
,
5235 /* Implementation of CFG manipulation for cfg layout RTL, where
5236 basic block connected via fallthru edges does not have to be adjacent.
5237 This representation will hopefully become the default one in future
5238 version of the compiler. */
5240 struct cfg_hooks cfg_layout_rtl_cfg_hooks
= {
5242 rtl_verify_flow_info_1
,
5244 rtl_dump_bb_for_graph
,
5245 cfg_layout_create_basic_block
,
5246 cfg_layout_redirect_edge_and_branch
,
5247 cfg_layout_redirect_edge_and_branch_force
,
5248 rtl_can_remove_branch_p
,
5249 cfg_layout_delete_block
,
5250 cfg_layout_split_block
,
5251 rtl_move_block_after
,
5252 cfg_layout_can_merge_blocks_p
,
5253 cfg_layout_merge_blocks
,
5256 cfg_layout_can_duplicate_bb_p
,
5257 cfg_layout_duplicate_bb
,
5258 cfg_layout_split_edge
,
5259 rtl_make_forwarder_block
,
5260 NULL
, /* tidy_fallthru_edge */
5261 rtl_force_nonfallthru
,
5262 rtl_block_ends_with_call_p
,
5263 rtl_block_ends_with_condjump_p
,
5264 rtl_flow_call_edges_add
,
5265 NULL
, /* execute_on_growing_pred */
5266 NULL
, /* execute_on_shrinking_pred */
5267 duplicate_loop_to_header_edge
, /* duplicate loop for trees */
5268 rtl_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
5269 NULL
, /* lv_adjust_loop_header_phi*/
5270 rtl_extract_cond_bb_edges
, /* extract_cond_bb_edges */
5271 NULL
, /* flush_pending_stmts */
5272 rtl_block_empty_p
, /* block_empty_p */
5273 rtl_split_block_before_cond_jump
, /* split_block_before_cond_jump */
5274 rtl_account_profile_record
,
5277 #include "gt-cfgrtl.h"
5280 # pragma GCC diagnostic pop