/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */
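/* Usage sketch (illustrative only; the call-site arguments below are not
   taken from this file): a pass that has a complete insn chain rebuilds
   the whole CFG with

       find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);

   while a pass that has merely emitted new control flow insns into an
   existing block re-splits just that block with

       find_sub_basic_blocks (bb);  */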
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, basic_block,
						 basic_block, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries		PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
static bool inside_basic_block_p	PARAMS ((rtx));
/* Return true if insn is something that should be contained inside a
   basic block.  */

static bool
inside_basic_block_p (insn)
     rtx insn;
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jumptables.  */
      return (NEXT_INSN (insn) == 0
	      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      /* Jump tables themselves are not inside any basic block.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}
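/* In other words: ordinary INSNs and CALL_INSNs always live inside a
   basic block, BARRIERs and NOTEs never do, and a CODE_LABEL does unless
   it only labels the ADDR_VEC/ADDR_DIFF_VEC data of a tablejump, which
   sits outside any block.  (Summary of the cases above.)  */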
/* Return true if INSN may cause a control flow transfer, so it should be
   last in the basic block.  */

bool
control_flow_insn_p (insn)
     rtx insn;
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer except for
	 tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* A call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the
	 end of a basic block, but before dead code is eliminated
	 this may happen.  */
      return false;

    default:
      abort ();
    }
}
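/* Concretely: a conditional or unconditional jump always ends its block,
   but the ADDR_VEC/ADDR_DIFF_VEC of a tablejump does not, since it is
   table data rather than a transfer of control; a call ends its block
   only when it may reach a nonlocal goto handler or throw internally,
   and an ordinary insn only when -fnon-call-exceptions makes it a
   potential thrower.  (Summary of the cases above.)  */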
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be
	 terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
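/* Worked example (illustrative insn stream):

       L1:  insn  cond_jump->L2  insn  barrier  L2:  insn

   L1 starts a block that the conditional jump terminates (count 1), the
   following insn starts a block that the BARRIER terminates (count 2),
   and L2 starts a final block counted when the scan runs off the end of
   the chain, so count_basic_blocks returns 3.  */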
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
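/* reachable_handlers gives back an INSN_LIST of the handler labels the
   exception machinery considers reachable from INSN; each one becomes an
   EDGE_ABNORMAL | EDGE_EH edge (additionally EDGE_ABNORMAL_CALL when INSN
   is a call), and the temporary list is freed once the edges exist.  */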
/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto or
   a computed jump to one of these labels.

   UPDATE_P is nonzero when the CFG already exists and is being updated; in
   that case the blocks' current successor edges are entered into the edge
   cache up front.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     basic_block min, max;
     int update_p;
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By the nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb
     is always the entry block.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return creates an exit edge.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined
	 call and return, and so we need an edge to the exit block.  No need
	 to worry about EH edges, since we wouldn't have created the sibling
	 call in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  rtx tmp = bb->next_bb->head;
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
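/* The tablejump case above, spelled out on a sketch of the RTL involved
   (abbreviated and illustrative, not literal output):

       (jump_insn (set (pc) (reg)))              ; dispatch jump
       L_table:
       (jump_insn (addr_vec [L_a L_b L_c]))      ; the jump table

   JUMP_LABEL of the dispatch jump points at L_table, the insn after that
   label is the table itself, and an edge is made from the dispatch block
   to each of L_a, L_b and L_c.  An ADDR_DIFF_VEC keeps its label vector
   in operand 1 rather than operand 0, hence the two XVEC cases.  */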
/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken.  This will be used
   in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;

	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  break;
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
	      EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
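/* BLOCK_NEW is deliberately zero: blocks created by split_block inside
   find_bb_boundaries come out with a null aux field, so they read back
   as BLOCK_NEW through STATE () without any explicit marking, which is
   what the BLOCK_NEW test in find_many_sub_basic_blocks relies on.  */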
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated
     by a return and barrier, or possibly some other sequence not behaving
     like an ordinary jump, we need to take care and move the basic block
     boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
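/* Example of the situation handled here (illustrative): if expansion
   turned an insn in the middle of BB into a noreturn call followed by a
   barrier and a code label, the call is now a flow-transfer insn; the
   block is cut after it, the fallthru edge created by split_block is
   removed, and the label opens a fresh block whose outgoing edges
   make_edges will recreate on the next pass over it.  */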
/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (b)
     basic_block b;
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}
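/* Worked example: with REG_BR_PROB_BASE at its usual value of 10000, a
   REG_BR_PROB note of 9000 on a block whose count is 200 gives the branch
   edge probability 9000 and count (200 * 9000 + 5000) / 10000 = 180,
   leaving the fallthru edge probability 1000 and count 200 - 180 = 20.  */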
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
/* Like above but for a single basic block only.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      if (b != min)
	{
	  edge e;

	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}