/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
	 find_basic_blocks  */
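
/* For illustration (a hypothetical insn stream, not taken from any real
   function), a conditional jump and two labels divide the stream

	   insn-1
	   insn-2
	   jump_insn -> L2	<- control flow insn: ends block 1
       L1: insn-3		<- label: starts block 2
	   insn-4
       L2: insn-5		<- label: starts block 3

   into three basic blocks; make_edges then adds a branch edge from
   block 1 to block 3, plus fallthru edges from block 1 to block 2 and
   from block 2 to block 3.  */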
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || !JUMP_P (NEXT_INSN (insn))
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}
/* Return true if INSN may cause a control flow transfer, so it should be
   last in the basic block.  */

bool
control_flow_insn_p (rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
	 tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;
      /* Call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the
	 end of a basic block, but before dead code is eliminated
	 this may happen.  */
      return false;

    default:
      gcc_unreachable ();
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (rtx f)
{
  int count = NUM_FIXED_BLOCKS;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((LABEL_P (insn) || BARRIER_P (insn))
	  && saw_insn)
	count++, saw_insn = false;

      /* Start basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* Control flow insn causes current basic block to be terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit more smoothly when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == NUM_FIXED_BLOCKS)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = NUM_FIXED_BLOCKS + 1;
    }

  return count;
}
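
/* Worked example for count_basic_blocks, assuming a hypothetical stream
   "insn, jump, label, insn, barrier" in which the jump is an ordinary
   branch: the jump terminates the first block, the label starts a second
   one, and the barrier terminates it at the previous real insn, so the
   function returns NUM_FIXED_BLOCKS + 2.  */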
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP		32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
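
/* Usage sketch for the accessors above (illustrative only): the state is
   stashed in bb->aux, so a caller marks and tests blocks like

	SET_STATE (bb, BLOCK_TO_SPLIT);
	if (STATE (bb) == BLOCK_TO_SPLIT)
	  find_bb_boundaries (bb);

   BLOCK_USED_BY_TABLEJUMP is a power of two above every enum state value
   precisely so it can be ORed into the same aux word and masked back out
   without disturbing the state.  */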
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if we
   are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By nature of the way these get numbered, the block
     ENTRY_BLOCK_PTR->next_bb is always the first block of the function.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
	  sbitmap_zero (edge_cache);
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest != EXIT_BLOCK_PTR)
		  SET_BIT (edge_cache, e->dest->index);
	    }
	}

      if (LABEL_P (BB_HEAD (bb))
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    rtl_make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &tmp))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the forced_labels list.  */
	  else if (computed_jump_p (insn))
	    for (x = forced_labels; x; x = XEXP (x, 1))
	      make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	  /* Return insns create an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      gcc_assert (JUMP_LABEL (insn));
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  rtl_make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;

      while (insn
	     && NOTE_P (insn)
	     && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn)
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  if (insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  /* EDGE_CACHE is a single sbitmap from sbitmap_alloc, so free it with
     sbitmap_free rather than sbitmap_vector_free.  */
  if (edge_cache)
    sbitmap_free (edge_cache);
}
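
/* Illustrative example of the tablejump case above, assuming a
   hypothetical dispatch table: a tablejump through an ADDR_VEC with
   entries L1, L2 and L3 makes make_edges emit one label edge per vector
   element, so the block gains successors BLOCK_FOR_INSN (L1),
   BLOCK_FOR_INSN (L2) and BLOCK_FOR_INSN (L3).  */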
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((LABEL_P (insn) || BARRIER_P (insn))
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case CALL_INSN:
	case INSN:
	case BARRIER:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  gcc_assert (last_basic_block == n_basic_blocks);

  clear_aux_for_blocks ();
}
/* Find basic blocks of the current function.
   F is the first insn of the function.  */

void
find_basic_blocks (rtx f)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      basic_block_info = NULL;
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = NUM_FIXED_BLOCKS;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  basic_block_info = VEC_alloc (basic_block, gc, n_basic_blocks);
  VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
  memset (VEC_address (basic_block, basic_block_info), 0,
	  sizeof (basic_block) * n_basic_blocks);
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);

  find_basic_blocks_1 (f);

  profile_status = PROFILE_ABSENT;

  /* Tell make_edges to examine every block for out-going edges.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, BLOCK_NEW);

  /* Discover the edges of our cfg.  */
  make_edges (ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}
static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     mark that target as used if necessary.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
	{
	  SET_STATE (e->dest, FULL_STATE (e->dest)
			      & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
	  ei_next (&ei);
	}
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	remove_edge (e);
      else
	ei_next (&ei);
    }
}
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    BB_END (bb) = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  BB_END (bb) = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated
     by a return and a barrier, or possibly some other sequence not behaving
     like an ordinary jump, we need to take care and move the basic block
     boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}
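
/* Illustrative sketch of a split, assuming a hypothetical block body
   (not from real sources):

	insn-1
	call_insn [REG_NORETURN]   <- control_flow_insn_p, remembered
	insn-2			   <- forces a split before this insn
       L: insn-3		   <- the label forces another split

   find_bb_boundaries ends the first block right after the noreturn call,
   drops the dead fallthru edge, and creates fresh blocks at insn-2 and at
   the label L via split_block.  */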
/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
	{
	  probability = INTVAL (XEXP (note, 0));
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
	  e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		      / REG_BR_PROB_BASE);
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	  return;
	}
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
}
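
/* Worked example for the REG_BR_PROB path above, assuming the usual
   REG_BR_PROB_BASE of 10000: for a block with count 300 carrying a note
   value of 6000, the branch edge gets probability 6000 and count
   (300 * 6000 + 5000) / 10000 == 180, while the fallthru edge gets
   probability 4000 and count 300 - 180 == 120.  */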
/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
	edge_iterator ei;

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    bb->count = 0;
	    bb->frequency = 0;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		bb->count += e->count;
		bb->frequency += EDGE_FREQUENCY (e);
	      }
	  }

	compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
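
/* Usage sketch (hypothetical caller, not part of this file): a pass that
   has inserted new control flow into existing blocks collects those
   blocks in an sbitmap and asks for the CFG to be fixed up:

	sbitmap blocks = sbitmap_alloc (last_basic_block);
	sbitmap_zero (blocks);
	SET_BIT (blocks, bb->index);
	find_many_sub_basic_blocks (blocks);
	sbitmap_free (blocks);
*/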