/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Basic block manipulation
	 create_basic_block, rtl_delete_block, rtl_split_block,
	 merge_blocks_nomove
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of insn chain
	 block_label, redirect_edge_and_branch,
	 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
     - Edge splitting and committing to edges
	 split_edge, insert_insn_on_edge, commit_edge_insertions
     - CFG updating after constant propagation
	 purge_dead_edges, purge_all_dead_edges  */
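
/* Illustrative sketch (not part of the original file): a typical caller that
   must retarget an edge first tries the cheap redirection listed above and
   only then forces a new jump block, roughly

     if (!redirect_edge_and_branch (e, target))
       force_nonfallthru_and_redirect (e, target);

   where E and TARGET are hypothetical placeholders.  */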
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "cfglayout.h"

/* Stubs in case we don't have a return insn.  */
#ifndef HAVE_return
#define HAVE_return 0
#define gen_return() NULL_RTX
#endif
/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

rtx tail_recursion_label_list;
static int can_delete_note_p			PARAMS ((rtx));
static int can_delete_label_p			PARAMS ((rtx));
static void commit_one_edge_insertion		PARAMS ((edge, int));
static bool try_redirect_by_replacing_jump	PARAMS ((edge, basic_block));
static rtx last_loop_beg_note			PARAMS ((rtx));
static bool back_edge_of_syntactic_loop_p	PARAMS ((basic_block, basic_block));
basic_block force_nonfallthru_and_redirect	PARAMS ((edge, basic_block));
static basic_block rtl_split_edge		PARAMS ((edge));
static int rtl_verify_flow_info		PARAMS ((void));
static edge cfg_layout_split_block		PARAMS ((basic_block, void *));
static bool cfg_layout_redirect_edge_and_branch PARAMS ((edge, basic_block));
static basic_block cfg_layout_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
static void cfg_layout_delete_block		PARAMS ((basic_block));
static void rtl_delete_block			PARAMS ((basic_block));
static basic_block rtl_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
static bool rtl_redirect_edge_and_branch	PARAMS ((edge, basic_block));
static edge rtl_split_block			PARAMS ((basic_block, void *));
static void rtl_dump_bb			PARAMS ((basic_block, FILE *));
static int rtl_verify_flow_info_1		PARAMS ((void));
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label)
	  && !in_expr_list_p (label_value_list, label));
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
	abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
    LABEL_NUSES (XEXP (note, 0))--;

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (GET_CODE (label) != NOTE)
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
/* Like delete_insn but also purge dead edges from BB.  */

rtx
delete_insn_and_edges (insn)
     rtx insn;
{
  rtx x;
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BLOCK_FOR_INSN (insn)->end == insn)
    purge = true;

  x = delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
  return x;
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
delete_insn_chain (start, finish)
     rtx start, finish;
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Like delete_insn_chain but also purge dead edges from BB.  */

void
delete_insn_chain_and_edges (first, last)
     rtx first, last;
{
  bool purge = false;

  if (INSN_P (last)
      && BLOCK_FOR_INSN (last)
      && BLOCK_FOR_INSN (last)->end == last)
    purge = true;

  delete_insn_chain (first, last);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (last));
}
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow the
   BASIC_BLOCK chain, and should be used directly only by CFG construction code.
   END can be NULL to create a new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create a basic block at the end of the INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (head, end, bb_note, after)
     rtx head, end, bb_note;
     basic_block after;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */
      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */
      bb = alloc_block ();

      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (GET_CODE (head) == CODE_LABEL && end)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = last_basic_block++;
  link_block (bb, after);
  BASIC_BLOCK (bb->index) = bb;
  update_bb_for_insn (bb);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
/* Create a new basic block consisting of the instructions between HEAD and END
   and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be NULL
   to create a basic block at the end of the INSN chain.  */

basic_block
create_basic_block (head, end, after)
     rtx head, end;
     basic_block after;
{
  basic_block bb;

  /* Place the new block just after the end.  */
  VARRAY_GROW (basic_block_info, last_basic_block + 1);

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}
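
/* Illustrative sketch (not part of the original file): CFG construction code
   typically pairs the call above with explicit edge creation, e.g.

     bb = create_basic_block (head, end, after_bb);
     make_single_succ_edge (bb, dest_bb, EDGE_FALLTHRU);

   HEAD, END, AFTER_BB and DEST_BB are hypothetical placeholders.  */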
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (b)
     basic_block b;
{
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
     hanging before the block.  */

  for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	break;
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
	  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
    }

  insn = b->head;

  never_reached_warning (insn, b->end);

  if (GET_CODE (insn) == CODE_LABEL)
    maybe_remove_eh_handler (insn);

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  while (b->pred != NULL)
    remove_edge (b->pred);
  while (b->succ != NULL)
    remove_edge (b->succ);

  /* Remove the basic block from the array.  */
  expunge_block (b);
}
/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx end = bb->end;
      rtx insn;

      for (insn = bb->head; ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

void
free_bb_for_insn ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) != BARRIER)
      BLOCK_FOR_INSN (insn) = NULL;
}
/* Update the block an insn belongs to, for every insn within BB.  */

void
update_bb_for_insn (bb)
     basic_block bb;
{
  rtx insn;

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != BARRIER)
	set_block_for_insn (insn, bb);
      if (insn == bb->end)
	break;
    }
}
/* Split a block BB after insn INSN, creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

static edge
rtl_split_block (bb, insnp)
     basic_block bb;
     void *insnp;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx insn = insnp;

  /* There is no point splitting the block after its end.  */

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;
  bb->end = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succ = bb->succ;
  bb->succ = NULL;
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
#ifdef HAVE_conditional_execution
      /* In the presence of conditional execution we are not able to update
	 liveness precisely.  */
      if (reload_completed)
	{
	  bb->flags |= BB_DIRTY;
	  new_bb->flags |= BB_DIRTY;
	}
#endif
    }

  return new_edge;
}
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  rtx b_head = b->head, b_end = b->end, a_end = a->end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;
  edge e;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == a->head)
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
  a->flags |= b->flags;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;
  a->global_live_at_end = b->global_live_at_end;

  expunge_block (b);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  delete_insn_chain (del_first, del_last);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      rtx x;

      for (x = a_end; x != b_end; x = NEXT_INSN (x))
	set_block_for_insn (x, a);

      set_block_for_insn (b_end, a);

      a_end = b_end;
    }

  a->end = a_end;
}
/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (block)
     basic_block block;
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;

  if (GET_CODE (block->head) != CODE_LABEL)
    block->head = emit_label_before (gen_label_rtx (), block->head);

  return block->head;
}
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction by an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  rtx set;
  int fallthru = 0;
  edge tmp;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  if (tmp || !onlyjump_p (insn))
    return false;
  if ((!optimize || flow2_completed) && tablejump_p (insn, NULL, NULL))
    return false;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return false;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after (gen_jump (target_label), insn);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));

      delete_insn_chain (kill_from, insn);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table);

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    delete_insn (e->src->end);

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
/* Return the last LOOP_BEG note appearing after INSN, before the start of the
   next basic block.  Return INSN if there are no such notes.

   When emitting a jump to redirect a fallthru edge, it should always appear
   after the LOOP_BEG notes, as the loop optimizer expects a loop to either
   start with a fallthru edge or with a jump following the LOOP_BEG note that
   jumps to the loop exit test.  */

static rtx
last_loop_beg_note (insn)
     rtx insn;
{
  rtx last = insn;

  for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
      last = insn;

  return last;
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equivalent to
   TARGET.  Then it should try the simplifications and do nothing if none is
   possible.

   Return true if the transformation succeeded.  We still return false in case
   E already pointed to TARGET and we didn't manage to simplify the instruction
   stream.  */

static bool
rtl_redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;

  /* Do this fast path late, as we want above code to simplify for cases
     where called on single edge leaving basic block containing nontrivial
     jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  else if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (target == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode, new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();

      /* If the substitution doesn't succeed, die.  This can happen
	 if the back end emitted unrecognizable instructions or if
	 target is exit block on some arches.  */
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  if (target == EXIT_BLOCK_PTR)
	    return false;
	  abort ();
	}
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}
/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */

basic_block
force_nonfallthru_and_redirect (e, target)
     edge e;
     basic_block target;
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (e->src->end)
      /* When called from cfglayout, fallthru edges do not
	 necessarily go to the next block.  */
      && e->src->next_bb == e->dest
      && JUMP_LABEL (e->src->end) == e->dest->head)
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);

      if (!redirect_jump (e->src->end, block_label (target), 0))
	abort ();
      note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      if (e->dest != target)
	abort ();
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else if (!(e->flags & EDGE_FALLTHRU))
    abort ();
  else if (e->src == ENTRY_BLOCK_PTR)
    {
      /* We can't redirect the entry block.  Create an empty block at the
	 start of the function which we use to add the new jump.  */
      edge *pe1;
      basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);

      /* Change the existing edge's source to be the new block, and add
	 a new edge from the entry block to the new block.  */
      e->src = bb;
      for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
	if (*pe1 == e)
	  {
	    *pe1 = e->succ_next;
	    break;
	  }
      e->succ_next = NULL;
      bb->succ = e;
      make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
    }

  if (e->src->succ->succ_next || abnormal_edge_flags)
    {
      /* Create the new structures.  */

      /* Position the new block correctly relative to loop notes.  */
      note = last_loop_beg_note (e->src->end);
      note = NEXT_INSN (note);

      /* ... and ADDR_VECs.  */
      if (note != NULL_RTX
	  && GET_CODE (note) == CODE_LABEL
	  && NEXT_INSN (note)
	  && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
	  && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
	      || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
	note = NEXT_INSN (NEXT_INSN (note));

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = e->count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      if (target->global_live_at_start)
	{
	  jump_block->global_live_at_start
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  jump_block->global_live_at_end
	    = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  COPY_REG_SET (jump_block->global_live_at_start,
			target->global_live_at_start);
	  COPY_REG_SET (jump_block->global_live_at_end,
			target->global_live_at_start);
	}

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = e->probability;
      new_edge->count = e->count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
      if (HAVE_return)
	emit_jump_insn_after (gen_return (), jump_block->end);
      else
	abort ();
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after (gen_jump (label), jump_block->end);
      JUMP_LABEL (jump_block->end) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (jump_block->end);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  return new_bb;
}
/* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
   (and possibly create a new basic block) to make the edge non-fallthru.
   Return the newly created BB or NULL if none.  */

basic_block
force_nonfallthru (e)
     edge e;
{
  return force_nonfallthru_and_redirect (e, e->dest);
}
/* Redirect edge even at the expense of creating a new jump insn or
   basic block.  Return the new basic block if created, NULL otherwise.
   Abort if conversion is impossible.  */

static basic_block
rtl_redirect_edge_and_branch_force (e, target)
     edge e;
     basic_block target;
{
  if (redirect_edge_and_branch (e, target)
      || e->dest == target)
    return NULL;

  /* In case the edge redirection failed, try to force it to be non-fallthru
     and redirect newly created simplejump.  */
  return force_nonfallthru_and_redirect (e, target);
}
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
	 the potential of changing the code between -g and not -g.  */
      while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
	q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (c->head))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
/* Fix up edges that now fall through, or rather should now fall through
   but previously required a jump around now deleted blocks.  Simplify
   the search by only examining blocks numerically adjacent, since this
   is how find_basic_blocks created them.  */

void
tidy_fallthru_edges ()
{
  basic_block b, c;

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
    {
      edge s;

      c = b->next_bb;

      /* We care about simple conditional or unconditional jumps with
	 a single successor.

	 If we had a conditional branch to the next instruction when
	 find_basic_blocks was called, then there will only be one
	 out edge for the block which ended with the conditional
	 branch (since we do not create duplicate edges).

	 Furthermore, the edge will be marked as a fallthru because we
	 merge the flags for the duplicate edges.  So we do not want to
	 check that the edge is not a FALLTHRU edge.  */

      if ((s = b->succ) != NULL
	  && ! (s->flags & EDGE_COMPLEX)
	  && s->succ_next == NULL
	  && s->dest == c
	  /* If the jump insn has side effects, we can't tidy the edge.  */
	  && (GET_CODE (b->end) != JUMP_INSN
	      || onlyjump_p (b->end)))
	tidy_fallthru_edge (s, b, c);
    }
}
/* Helper function for split_edge.  Return true in case the edge BB2 to BB1
   is a back edge of a syntactic loop.  */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;
  basic_block bb;

  if (bb1 == bb2)
    return true;

  /* ??? Could we guarantee that bb indices are monotone, so that we could
     just compare them?  */
  for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
    continue;

  if (!bb)
    return false;

  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  count++;
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	  count--;
      }

  return count >= 0;
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge_in)
     edge edge_in;
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;

      for (e = edge_in->dest->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */

  if (edge_in->dest != EXIT_BLOCK_PTR
      && PREV_INSN (edge_in->dest->head)
      && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
      && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
	  == NOTE_INSN_LOOP_BEG)
      && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
    before = PREV_INSN (edge_in->dest->head);
  else if (edge_in->dest != EXIT_BLOCK_PTR)
    before = edge_in->dest->head;
  else
    before = NULL_RTX;

  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
  bb->count = edge_in->count;
  bb->frequency = EDGE_FREQUENCY (edge_in);

  /* ??? This info is likely going to be out of date very soon.  */
  if (edge_in->dest->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start,
		    edge_in->dest->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end,
		    edge_in->dest->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      if (!redirect_edge_and_branch (edge_in, bb))
	abort ();
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
1340 /* Update the CFG for the instructions queued on edge E. */
1343 commit_one_edge_insertion (e
, watch_calls
)
1347 rtx before
= NULL_RTX
, after
= NULL_RTX
, insns
, tmp
, last
;
1348 basic_block bb
= NULL
;
1350 /* Pull the insns off the edge now since the edge might go away. */
1352 e
->insns
= NULL_RTX
;
1354 /* Special case -- avoid inserting code between call and storing
1355 its return value. */
1356 if (watch_calls
&& (e
->flags
& EDGE_FALLTHRU
) && !e
->dest
->pred
->pred_next
1357 && e
->src
!= ENTRY_BLOCK_PTR
1358 && GET_CODE (e
->src
->end
) == CALL_INSN
)
1360 rtx next
= next_nonnote_insn (e
->src
->end
);
1362 after
= e
->dest
->head
;
1363 /* The first insn after the call may be a stack pop, skip it. */
1365 && keep_with_call_p (next
))
1368 next
= next_nonnote_insn (next
);
1372 if (!before
&& !after
)
1374 /* Figure out where to put these things. If the destination has
1375 one predecessor, insert there. Except for the exit block. */
1376 if (e
->dest
->pred
->pred_next
== NULL
&& e
->dest
!= EXIT_BLOCK_PTR
)
1380 /* Get the location correct wrt a code label, and "nice" wrt
1381 a basic block note, and before everything else. */
1383 if (GET_CODE (tmp
) == CODE_LABEL
)
1384 tmp
= NEXT_INSN (tmp
);
1385 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
1386 tmp
= NEXT_INSN (tmp
);
1387 if (tmp
== bb
->head
)
1390 after
= PREV_INSN (tmp
);
1392 after
= get_last_insn ();
1395 /* If the source has one successor and the edge is not abnormal,
1396 insert there. Except for the entry block. */
1397 else if ((e
->flags
& EDGE_ABNORMAL
) == 0
1398 && e
->src
->succ
->succ_next
== NULL
1399 && e
->src
!= ENTRY_BLOCK_PTR
)
1403 /* It is possible to have a non-simple jump here. Consider a target
1404 where some forms of unconditional jumps clobber a register. This
1405 happens on the fr30 for example.
1407 We know this block has a single successor, so we can just emit
1408 the queued insns before the jump. */
1409 if (GET_CODE (bb
->end
) == JUMP_INSN
)
1410 for (before
= bb
->end
;
1411 GET_CODE (PREV_INSN (before
)) == NOTE
1412 && NOTE_LINE_NUMBER (PREV_INSN (before
)) ==
1413 NOTE_INSN_LOOP_BEG
; before
= PREV_INSN (before
))
1417 /* We'd better be fallthru, or we've lost track of what's what. */
1418 if ((e
->flags
& EDGE_FALLTHRU
) == 0)
1424 /* Otherwise we must split the edge. */
1427 bb
= split_edge (e
);
1432 /* Now that we've found the spot, do the insertion. */
1436 emit_insn_before (insns
, before
);
1437 last
= prev_nonnote_insn (before
);
1440 last
= emit_insn_after (insns
, after
);
1442 if (returnjump_p (last
))
1444 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1445 This is not currently a problem because this only happens
1446 for the (single) epilogue, which already has a fallthru edge
1450 if (e
->dest
!= EXIT_BLOCK_PTR
1451 || e
->succ_next
!= NULL
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
1454 e
->flags
&= ~EDGE_FALLTHRU
;
1455 emit_barrier_after (last
);
1458 delete_insn (before
);
1460 else if (GET_CODE (last
) == JUMP_INSN
)
1463 /* Mark the basic block for find_sub_basic_blocks. */
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e, next;

      for (e = bb->succ; e; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    {
	      changed = true;
	      commit_one_edge_insertion (e, false);
	    }
	}
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	if (bb->aux != &bb->aux)
	  abort ();
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
/* Update the CFG for all queued instructions, taking special care of inserting
   code on edges between a call and the store of its return value.  */
1518 commit_edge_insertions_watch_calls ()
1522 bool changed
= false;
1524 #ifdef ENABLE_CHECKING
1525 verify_flow_info ();
1528 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, next_bb
)
1532 for (e
= bb
->succ
; e
; e
= next
)
1534 next
= e
->succ_next
;
1538 commit_one_edge_insertion (e
, true);
1546 blocks
= sbitmap_alloc (last_basic_block
);
1547 sbitmap_zero (blocks
);
1551 SET_BIT (blocks
, bb
->index
);
1552 /* Check for forgotten bb->aux values before commit_edge_insertions
1554 if (bb
->aux
!= &bb
->aux
)
1558 find_many_sub_basic_blocks (blocks
);
1559 sbitmap_free (blocks
);
/* Print out one basic block with live information at start and end.  */

static void
rtl_dump_bb (bb, outf)
     basic_block bb;
     FILE *outf;
{
  rtx insn;
  rtx last;

  fputs (";; Registers live at start:", outf);
  dump_regset (bb->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fputs (";; Registers live at end:", outf);
  dump_regset (bb->global_live_at_end, outf);
  putc ('\n', outf);
}
1585 /* Like print_rtl, but also print out live information for the start of each
1589 print_rtl_with_bb (outf
, rtx_first
)
1596 fprintf (outf
, "(nil)\n");
1599 enum bb_state
{ NOT_IN_BB
, IN_ONE_BB
, IN_MULTIPLE_BB
};
1600 int max_uid
= get_max_uid ();
1602 = (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
1604 = (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
1605 enum bb_state
*in_bb_p
1606 = (enum bb_state
*) xcalloc (max_uid
, sizeof (enum bb_state
));
1610 FOR_EACH_BB_REVERSE (bb
)
1614 start
[INSN_UID (bb
->head
)] = bb
;
1615 end
[INSN_UID (bb
->end
)] = bb
;
1616 for (x
= bb
->head
; x
!= NULL_RTX
; x
= NEXT_INSN (x
))
1618 enum bb_state state
= IN_MULTIPLE_BB
;
1620 if (in_bb_p
[INSN_UID (x
)] == NOT_IN_BB
)
1622 in_bb_p
[INSN_UID (x
)] = state
;
1629 for (tmp_rtx
= rtx_first
; NULL
!= tmp_rtx
; tmp_rtx
= NEXT_INSN (tmp_rtx
))
1633 if ((bb
= start
[INSN_UID (tmp_rtx
)]) != NULL
)
1635 fprintf (outf
, ";; Start of basic block %d, registers live:",
1637 dump_regset (bb
->global_live_at_start
, outf
);
1641 if (in_bb_p
[INSN_UID (tmp_rtx
)] == NOT_IN_BB
1642 && GET_CODE (tmp_rtx
) != NOTE
1643 && GET_CODE (tmp_rtx
) != BARRIER
)
1644 fprintf (outf
, ";; Insn is not within a basic block\n");
1645 else if (in_bb_p
[INSN_UID (tmp_rtx
)] == IN_MULTIPLE_BB
)
1646 fprintf (outf
, ";; Insn is in multiple basic blocks\n");
1648 did_output
= print_rtl_single (outf
, tmp_rtx
);
1650 if ((bb
= end
[INSN_UID (tmp_rtx
)]) != NULL
)
1652 fprintf (outf
, ";; End of basic block %d, registers live:\n",
1654 dump_regset (bb
->global_live_at_end
, outf
);
1667 if (current_function_epilogue_delay_list
!= 0)
1669 fprintf (outf
, "\n;; Insns in epilogue delay list:\n\n");
1670 for (tmp_rtx
= current_function_epilogue_delay_list
; tmp_rtx
!= 0;
1671 tmp_rtx
= XEXP (tmp_rtx
, 1))
1672 print_rtl_single (outf
, XEXP (tmp_rtx
, 0));
/* Update the branch probability stored in the REG_BR_PROB note of BB's
   final jump so that it matches the CFG.  */

void
update_br_prob_note (bb)
     basic_block bb;
{
  rtx note;

  if (GET_CODE (bb->end) != JUMP_INSN)
    return;

  note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}
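
/* Illustrative sketch (not from the original source): a pass that rescales the
   outgoing probabilities of a conditional jump typically finishes with

     BRANCH_EDGE (bb)->probability = new_prob;               -- hypothetical
     FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - new_prob;
     update_br_prob_note (bb);

   so the REG_BR_PROB note and the CFG stay consistent.  NEW_PROB is a
   hypothetical placeholder.  */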
1689 /* Verify the CFG and RTL consistency common for both underlying RTL and
1692 Currently it does following checks:
1694 - test head/end pointers
1695 - overlapping of basic blocks
1696 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1697 - tails of basic blocks (ensure that boundary is necessary)
1698 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1699 and NOTE_INSN_BASIC_BLOCK
1701 In future it can be extended check a lot of other stuff as well
1702 (reachability of basic blocks, life information, etc. etc.). */
1704 rtl_verify_flow_info_1 ()
1706 const int max_uid
= get_max_uid ();
1707 rtx last_head
= get_last_insn ();
1708 basic_block
*bb_info
;
1711 basic_block bb
, last_bb_seen
;
1713 bb_info
= (basic_block
*) xcalloc (max_uid
, sizeof (basic_block
));
1715 /* Check bb chain & numbers. */
1716 last_bb_seen
= ENTRY_BLOCK_PTR
;
1718 FOR_EACH_BB_REVERSE (bb
)
1720 rtx head
= bb
->head
;
1723 /* Verify the end of the basic block is in the INSN chain. */
1724 for (x
= last_head
; x
!= NULL_RTX
; x
= PREV_INSN (x
))
1730 error ("end insn %d for block %d not found in the insn stream",
1731 INSN_UID (end
), bb
->index
);
1735 /* Work backwards from the end to the head of the basic block
1736 to verify the head is in the RTL chain. */
1737 for (; x
!= NULL_RTX
; x
= PREV_INSN (x
))
1739 /* While walking over the insn chain, verify insns appear
1740 in only one basic block and initialize the BB_INFO array
1741 used by other passes. */
1742 if (bb_info
[INSN_UID (x
)] != NULL
)
1744 error ("insn %d is in multiple basic blocks (%d and %d)",
1745 INSN_UID (x
), bb
->index
, bb_info
[INSN_UID (x
)]->index
);
1749 bb_info
[INSN_UID (x
)] = bb
;
1756 error ("head insn %d for block %d not found in the insn stream",
1757 INSN_UID (head
), bb
->index
);
1764 /* Now check the basic blocks (boundaries etc.) */
1765 FOR_EACH_BB_REVERSE (bb
)
1767 int n_fallthru
= 0, n_eh
= 0, n_call
= 0, n_abnormal
= 0, n_branch
= 0;
1771 if (INSN_P (bb
->end
)
1772 && (note
= find_reg_note (bb
->end
, REG_BR_PROB
, NULL_RTX
))
1773 && bb
->succ
&& bb
->succ
->succ_next
1774 && any_condjump_p (bb
->end
))
1776 if (INTVAL (XEXP (note
, 0)) != BRANCH_EDGE (bb
)->probability
)
1778 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1779 INTVAL (XEXP (note
, 0)), BRANCH_EDGE (bb
)->probability
);
1783 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1785 if (e
->flags
& EDGE_FALLTHRU
)
1788 if ((e
->flags
& ~(EDGE_DFS_BACK
| EDGE_CAN_FALLTHRU
| EDGE_IRREDUCIBLE_LOOP
)) == 0)
1791 if (e
->flags
& EDGE_ABNORMAL_CALL
)
1794 if (e
->flags
& EDGE_EH
)
1796 else if (e
->flags
& EDGE_ABNORMAL
)
1800 if (n_eh
&& GET_CODE (PATTERN (bb
->end
)) != RESX
1801 && !find_reg_note (bb
->end
, REG_EH_REGION
, NULL_RTX
))
1803 error ("Missing REG_EH_REGION note in the end of bb %i", bb
->index
);
1807 && (GET_CODE (bb
->end
) != JUMP_INSN
1808 || (n_branch
> 1 && (any_uncondjump_p (bb
->end
)
1809 || any_condjump_p (bb
->end
)))))
1811 error ("Too many outgoing branch edges from bb %i", bb
->index
);
1814 if (n_fallthru
&& any_uncondjump_p (bb
->end
))
1816 error ("Fallthru edge after unconditional jump %i", bb
->index
);
1819 if (n_branch
!= 1 && any_uncondjump_p (bb
->end
))
1821 error ("Wrong amount of branch edges after unconditional jump %i", bb
->index
);
1824 if (n_branch
!= 1 && any_condjump_p (bb
->end
)
1825 && JUMP_LABEL (bb
->end
) != bb
->next_bb
->head
)
1827 error ("Wrong amount of branch edges after conditional jump %i", bb
->index
);
1830 if (n_call
&& GET_CODE (bb
->end
) != CALL_INSN
)
1832 error ("Call edges for non-call insn in bb %i", bb
->index
);
1836 && (GET_CODE (bb
->end
) != CALL_INSN
&& n_call
!= n_abnormal
)
1837 && (GET_CODE (bb
->end
) != JUMP_INSN
1838 || any_condjump_p (bb
->end
)
1839 || any_uncondjump_p (bb
->end
)))
1841 error ("Abnormal edges for no purpose in bb %i", bb
->index
);
1845 for (x
= bb
->head
; x
!= NEXT_INSN (bb
->end
); x
= NEXT_INSN (x
))
1846 if (BLOCK_FOR_INSN (x
) != bb
)
1849 if (! BLOCK_FOR_INSN (x
))
1851 ("insn %d inside basic block %d but block_for_insn is NULL",
1852 INSN_UID (x
), bb
->index
);
1855 ("insn %d inside basic block %d but block_for_insn is %i",
1856 INSN_UID (x
), bb
->index
, BLOCK_FOR_INSN (x
)->index
);
1861 /* OK pointers are correct. Now check the header of basic
1862 block. It ought to contain optional CODE_LABEL followed
1863 by NOTE_BASIC_BLOCK. */
1865 if (GET_CODE (x
) == CODE_LABEL
)
1869 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1877 if (!NOTE_INSN_BASIC_BLOCK_P (x
) || NOTE_BASIC_BLOCK (x
) != bb
)
1879 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
	  /* Do checks for empty blocks here.  */
1888 for (x
= NEXT_INSN (x
); x
; x
= NEXT_INSN (x
))
1890 if (NOTE_INSN_BASIC_BLOCK_P (x
))
1892 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1893 INSN_UID (x
), bb
->index
);
1900 if (control_flow_insn_p (x
))
1902 error ("in basic block %d:", bb
->index
);
1903 fatal_insn ("flow control insn inside a basic block", x
);
1913 /* Verify the CFG and RTL consistency common for both underlying RTL and
1916 Currently it does following checks:
1917 - all checks of rtl_verify_flow_info_1
1918 - check that all insns are in the basic blocks
1919 (except the switch handling code, barriers and notes)
1920 - check that all returns are followed by barriers
1921 - check that all fallthru edge points to the adjacent blocks. */
1923 rtl_verify_flow_info ()
1926 int err
= rtl_verify_flow_info_1 ();
1929 const rtx rtx_first
= get_insns ();
1930 basic_block last_bb_seen
= ENTRY_BLOCK_PTR
, curr_bb
= NULL
;
1932 FOR_EACH_BB_REVERSE (bb
)
1935 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
1936 if (e
->flags
& EDGE_FALLTHRU
)
1942 /* Ensure existence of barrier in BB with no fallthru edges. */
1943 for (insn
= bb
->end
; !insn
|| GET_CODE (insn
) != BARRIER
;
1944 insn
= NEXT_INSN (insn
))
1946 || (GET_CODE (insn
) == NOTE
1947 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BASIC_BLOCK
))
1949 error ("missing barrier after block %i", bb
->index
);
1954 else if (e
->src
!= ENTRY_BLOCK_PTR
1955 && e
->dest
!= EXIT_BLOCK_PTR
)
1959 if (e
->src
->next_bb
!= e
->dest
)
1962 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1963 e
->src
->index
, e
->dest
->index
);
1967 for (insn
= NEXT_INSN (e
->src
->end
); insn
!= e
->dest
->head
;
1968 insn
= NEXT_INSN (insn
))
1969 if (GET_CODE (insn
) == BARRIER
1970 #ifndef CASE_DROPS_THROUGH
1973 || (INSN_P (insn
) && ! JUMP_TABLE_DATA_P (insn
))
1977 error ("verify_flow_info: Incorrect fallthru %i->%i",
1978 e
->src
->index
, e
->dest
->index
);
1979 fatal_insn ("wrong insn in the fallthru edge", insn
);
1986 last_bb_seen
= ENTRY_BLOCK_PTR
;
1988 for (x
= rtx_first
; x
; x
= NEXT_INSN (x
))
1990 if (NOTE_INSN_BASIC_BLOCK_P (x
))
1992 bb
= NOTE_BASIC_BLOCK (x
);
1995 if (bb
!= last_bb_seen
->next_bb
)
1996 internal_error ("basic blocks not laid down consecutively");
1998 curr_bb
= last_bb_seen
= bb
;
2003 switch (GET_CODE (x
))
	  /* An addr_vec is placed outside any basic block.  */
2012 && GET_CODE (NEXT_INSN (x
)) == JUMP_INSN
2013 && (GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_DIFF_VEC
2014 || GET_CODE (PATTERN (NEXT_INSN (x
))) == ADDR_VEC
))
2017 /* But in any case, non-deletable labels can appear anywhere. */
2021 fatal_insn ("insn outside basic block", x
);
2026 && GET_CODE (x
) == JUMP_INSN
2027 && returnjump_p (x
) && ! condjump_p (x
)
2028 && ! (NEXT_INSN (x
) && GET_CODE (NEXT_INSN (x
)) == BARRIER
))
2029 fatal_insn ("return not followed by barrier", x
);
2030 if (curr_bb
&& x
== curr_bb
->end
)
2034 if (num_bb_notes
!= n_basic_blocks
)
2036 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2037 num_bb_notes
, n_basic_blocks
);
2042 /* Assume that the preceding pass has possibly eliminated jump instructions
2043 or converted the unconditional jumps. Eliminate the edges from CFG.
2044 Return true if any edges are eliminated. */
2047 purge_dead_edges (bb
)
2051 rtx insn
= bb
->end
, note
;
2052 bool purged
= false;
2054 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2055 if (GET_CODE (insn
) == INSN
2056 && (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
)))
2060 if (! may_trap_p (PATTERN (insn
))
2061 || ((eqnote
= find_reg_equal_equiv_note (insn
))
2062 && ! may_trap_p (XEXP (eqnote
, 0))))
2063 remove_note (insn
, note
);
2066 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2067 for (e
= bb
->succ
; e
; e
= next
)
2069 next
= e
->succ_next
;
2070 if (e
->flags
& EDGE_EH
)
2072 if (can_throw_internal (bb
->end
))
2075 else if (e
->flags
& EDGE_ABNORMAL_CALL
)
2077 if (GET_CODE (bb
->end
) == CALL_INSN
2078 && (! (note
= find_reg_note (insn
, REG_EH_REGION
, NULL
))
2079 || INTVAL (XEXP (note
, 0)) >= 0))
2086 bb
->flags
|= BB_DIRTY
;
2090 if (GET_CODE (insn
) == JUMP_INSN
)
2095 /* We do care only about conditional jumps and simplejumps. */
2096 if (!any_condjump_p (insn
)
2097 && !returnjump_p (insn
)
2098 && !simplejump_p (insn
))
2101 /* Branch probability/prediction notes are defined only for
2102 condjumps. We've possibly turned condjump into simplejump. */
2103 if (simplejump_p (insn
))
2105 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
2107 remove_note (insn
, note
);
2108 while ((note
= find_reg_note (insn
, REG_BR_PRED
, NULL
)))
2109 remove_note (insn
, note
);
2112 for (e
= bb
->succ
; e
; e
= next
)
2114 next
= e
->succ_next
;
	  /* Avoid abnormal flags leaking from computed jumps turned
	     into simplejumps.  */
2119 e
->flags
&= ~EDGE_ABNORMAL
;
2121 /* See if this edge is one we should keep. */
2122 if ((e
->flags
& EDGE_FALLTHRU
) && any_condjump_p (insn
))
2123 /* A conditional jump can fall through into the next
2124 block, so we should keep the edge. */
2126 else if (e
->dest
!= EXIT_BLOCK_PTR
2127 && e
->dest
->head
== JUMP_LABEL (insn
))
2128 /* If the destination block is the target of the jump,
2131 else if (e
->dest
== EXIT_BLOCK_PTR
&& returnjump_p (insn
))
2132 /* If the destination block is the exit block, and this
2133 instruction is a return, then keep the edge. */
2135 else if ((e
->flags
& EDGE_EH
) && can_throw_internal (insn
))
2136 /* Keep the edges that correspond to exceptions thrown by
2137 this instruction. */
2140 /* We do not need this edge. */
2141 bb
->flags
|= BB_DIRTY
;
2146 if (!bb
->succ
|| !purged
)
2150 fprintf (rtl_dump_file
, "Purged edges from bb %i\n", bb
->index
);
2155 /* Redistribute probabilities. */
2156 if (!bb
->succ
->succ_next
)
2158 bb
->succ
->probability
= REG_BR_PROB_BASE
;
2159 bb
->succ
->count
= bb
->count
;
2163 note
= find_reg_note (insn
, REG_BR_PROB
, NULL
);
2167 b
= BRANCH_EDGE (bb
);
2168 f
= FALLTHRU_EDGE (bb
);
2169 b
->probability
= INTVAL (XEXP (note
, 0));
2170 f
->probability
= REG_BR_PROB_BASE
- b
->probability
;
2171 b
->count
= bb
->count
* b
->probability
/ REG_BR_PROB_BASE
;
2172 f
->count
= bb
->count
* f
->probability
/ REG_BR_PROB_BASE
;
2177 else if (GET_CODE (insn
) == CALL_INSN
&& SIBLING_CALL_P (insn
))
2179 /* First, there should not be any EH or ABCALL edges resulting
2180 from non-local gotos and the like. If there were, we shouldn't
2181 have created the sibcall in the first place. Second, there
2182 should of course never have been a fallthru edge. */
2183 if (!bb
->succ
|| bb
->succ
->succ_next
)
2185 if (bb
->succ
->flags
!= (EDGE_SIBCALL
| EDGE_ABNORMAL
))
2191 /* If we don't see a jump insn, we don't know exactly why the block would
2192 have been broken at this point. Look for a simple, non-fallthru edge,
2193 as these are only created by conditional branches. If we find such an
2194 edge we know that there used to be a jump here and can then safely
2195 remove all non-fallthru edges. */
2196 for (e
= bb
->succ
; e
&& (e
->flags
& (EDGE_COMPLEX
| EDGE_FALLTHRU
));
2203 for (e
= bb
->succ
; e
; e
= next
)
2205 next
= e
->succ_next
;
2206 if (!(e
->flags
& EDGE_FALLTHRU
))
2208 bb
->flags
|= BB_DIRTY
;
2214 if (!bb
->succ
|| bb
->succ
->succ_next
)
2217 bb
->succ
->probability
= REG_BR_PROB_BASE
;
2218 bb
->succ
->count
= bb
->count
;
2221 fprintf (rtl_dump_file
, "Purged non-fallthru edges from bb %i\n",
/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (update_life_p)
     int update_life_p;
{
  bool purged = false;
  sbitmap blocks = NULL;
  basic_block bb;

  if (update_life_p)
    {
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (blocks);
    }

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
      if (purged_here && update_life_p)
	SET_BIT (blocks, bb->index);
    }

  if (update_life_p && purged)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL,
		      PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
		      | PROP_KILL_DEAD_CODE);

  if (update_life_p)
    sbitmap_free (blocks);
  return purged;
}
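
/* Illustrative sketch (not from the original source): after a pass has
   simplified or deleted jump insns, the usual cleanup is either

     purge_dead_edges (bb);        -- for a single rewritten block BB

   or, across the whole function,

     purge_all_dead_edges (0);     -- pass nonzero to also update life info

   BB is a hypothetical placeholder.  */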
/* Same as split_block but update cfg_layout structures.  */

static edge
cfg_layout_split_block (bb, insnp)
     basic_block bb;
     void *insnp;
{
  rtx insn = insnp;
  edge fallthru = rtl_split_block (bb, insn);

  alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
  RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
  RBI (fallthru->src)->footer = NULL;

  return fallthru;
}
2279 /* Redirect Edge to DEST. */
2281 cfg_layout_redirect_edge_and_branch (e
, dest
)
2285 basic_block src
= e
->src
;
2286 basic_block old_next_bb
= src
->next_bb
;
2289 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2290 in the case the basic block appears to be in sequence. Avoid this
2293 src
->next_bb
= NULL
;
2294 if (e
->flags
& EDGE_FALLTHRU
)
2296 /* Redirect any branch edges unified with the fallthru one. */
2297 if (GET_CODE (src
->end
) == JUMP_INSN
2298 && JUMP_LABEL (src
->end
) == e
->dest
->head
)
2300 if (!redirect_jump (src
->end
, block_label (dest
), 0))
2303 /* In case we are redirecting fallthru edge to the branch edge
2304 of conditional jump, remove it. */
2305 if (src
->succ
->succ_next
2306 && !src
->succ
->succ_next
->succ_next
)
2308 edge s
= e
->succ_next
? e
->succ_next
: src
->succ
;
2310 && any_condjump_p (src
->end
)
2311 && onlyjump_p (src
->end
))
2312 delete_insn (src
->end
);
2314 redirect_edge_succ_nodup (e
, dest
);
2319 ret
= rtl_redirect_edge_and_branch (e
, dest
);
2321 /* We don't want simplejumps in the insn stream during cfglayout. */
2322 if (simplejump_p (src
->end
))
2324 delete_insn (src
->end
);
2325 delete_barrier (NEXT_INSN (src
->end
));
2326 src
->succ
->flags
|= EDGE_FALLTHRU
;
2328 src
->next_bb
= old_next_bb
;
/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
cfg_layout_redirect_edge_and_branch_force (e, dest)
     edge e;
     basic_block dest;
{
  if (!cfg_layout_redirect_edge_and_branch (e, dest))
    abort ();

  return NULL;
}
2344 /* Same as flow_delete_block but update cfg_layout structures. */
2346 cfg_layout_delete_block (bb
)
2349 rtx insn
, next
, prev
= PREV_INSN (bb
->head
), *to
, remaints
;
2351 if (RBI (bb
)->header
)
2355 NEXT_INSN (prev
) = RBI (bb
)->header
;
2357 set_first_insn (RBI (bb
)->header
);
2358 PREV_INSN (RBI (bb
)->header
) = prev
;
2359 insn
= RBI (bb
)->header
;
2360 while (NEXT_INSN (insn
))
2361 insn
= NEXT_INSN (insn
);
2362 NEXT_INSN (insn
) = next
;
2363 PREV_INSN (next
) = insn
;
2365 next
= NEXT_INSN (bb
->end
);
2366 if (RBI (bb
)->footer
)
2369 NEXT_INSN (insn
) = RBI (bb
)->footer
;
2370 PREV_INSN (RBI (bb
)->footer
) = insn
;
2371 while (NEXT_INSN (insn
))
2372 insn
= NEXT_INSN (insn
);
2373 NEXT_INSN (insn
) = next
;
2375 PREV_INSN (next
) = insn
;
2377 set_last_insn (insn
);
2379 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
2380 to
= &RBI(bb
->next_bb
)->header
;
2382 to
= &cfg_layout_function_footer
;
2383 rtl_delete_block (bb
);
2386 prev
= NEXT_INSN (prev
);
2388 prev
= get_insns ();
2390 next
= PREV_INSN (next
);
2392 next
= get_last_insn ();
2394 if (next
&& NEXT_INSN (next
) != prev
)
2396 remaints
= unlink_insn_chain (prev
, next
);
2398 while (NEXT_INSN (insn
))
2399 insn
= NEXT_INSN (insn
);
2400 NEXT_INSN (insn
) = *to
;
2402 PREV_INSN (*to
) = insn
;
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_delete_block,
  rtl_split_block,
  rtl_split_edge
};

/* Implementation of CFG manipulation for cfg layout RTL, where basic blocks
   connected via fallthru edges do not have to be adjacent.  This
   representation will hopefully become the default one in future versions of
   the compiler.  */
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  rtl_verify_flow_info_1,	/* verify_flow_info.  */
  rtl_dump_bb,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  NULL				/* split_edge.  */
};