/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
	 find_basic_blocks
     - Local CFG construction
	 find_sub_basic_blocks */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
#include "obstack.h"
51 static int count_basic_blocks (rtx
);
52 static void find_basic_blocks_1 (rtx
);
53 static rtx
find_label_refs (rtx
, rtx
);
54 static void make_edges (rtx
, basic_block
, basic_block
, int);
55 static void make_label_edge (sbitmap
*, basic_block
, rtx
, int);
56 static void find_bb_boundaries (basic_block
);
57 static void compute_outgoing_frequencies (basic_block
);
59 /* Return true if insn is something that should be contained inside basic
63 inside_basic_block_p (rtx insn
)
65 switch (GET_CODE (insn
))
68 /* Avoid creating of basic block for jumptables. */
69 return (NEXT_INSN (insn
) == 0
70 || GET_CODE (NEXT_INSN (insn
)) != JUMP_INSN
71 || (GET_CODE (PATTERN (NEXT_INSN (insn
))) != ADDR_VEC
72 && GET_CODE (PATTERN (NEXT_INSN (insn
))) != ADDR_DIFF_VEC
));
75 return (GET_CODE (PATTERN (insn
)) != ADDR_VEC
76 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
);
91 /* Return true if INSN may cause control flow transfer, so it should be last in
95 control_flow_insn_p (rtx insn
)
99 switch (GET_CODE (insn
))
106 /* Jump insn always causes control transfer except for tablejumps. */
107 return (GET_CODE (PATTERN (insn
)) != ADDR_VEC
108 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
);
111 /* Call insn may return to the nonlocal goto handler. */
112 return ((nonlocal_goto_handler_labels
113 && (0 == (note
= find_reg_note (insn
, REG_EH_REGION
,
115 || INTVAL (XEXP (note
, 0)) >= 0))
117 || can_throw_internal (insn
));
120 return (flag_non_call_exceptions
&& can_throw_internal (insn
));
123 /* It is nonsense to reach barrier when looking for the
124 end of basic block, but before dead code is eliminated
133 /* Count the basic blocks of the function. */
136 count_basic_blocks (rtx f
)
139 bool saw_insn
= false;
142 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
144 /* Code labels and barriers causes current basic block to be
145 terminated at previous real insn. */
146 if ((GET_CODE (insn
) == CODE_LABEL
|| GET_CODE (insn
) == BARRIER
)
148 count
++, saw_insn
= false;
150 /* Start basic block if needed. */
151 if (!saw_insn
&& inside_basic_block_p (insn
))
154 /* Control flow insn causes current basic block to be terminated. */
155 if (saw_insn
&& control_flow_insn_p (insn
))
156 count
++, saw_insn
= false;
162 /* The rest of the compiler works a bit smoother when we don't have to
163 check for the edge case of do-nothing functions with no basic blocks. */
166 emit_insn (gen_rtx_USE (VOIDmode
, const0_rtx
));
173 /* Scan a list of insns for labels referred to other than by jumps.
174 This is used to scan the alternatives of a call placeholder. */
177 find_label_refs (rtx f
, rtx lvl
)
181 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
182 if (INSN_P (insn
) && GET_CODE (insn
) != JUMP_INSN
)
186 /* Make a list of all labels referred to other than by jumps
187 (which just don't have the REG_LABEL notes).
189 Make a special exception for labels followed by an ADDR*VEC,
190 as this would be a part of the tablejump setup code.
192 Make a special exception to registers loaded with label
193 values just before jump insns that use them. */
195 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
196 if (REG_NOTE_KIND (note
) == REG_LABEL
)
198 rtx lab
= XEXP (note
, 0), next
;
200 if ((next
= next_nonnote_insn (lab
)) != NULL
201 && GET_CODE (next
) == JUMP_INSN
202 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
203 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
205 else if (GET_CODE (lab
) == NOTE
)
207 else if (GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
208 && find_reg_note (NEXT_INSN (insn
), REG_LABEL
, lab
))
211 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
218 /* Create an edge between two basic blocks. FLAGS are auxiliary information
219 about the edge that is accumulated between calls. */
221 /* Create an edge from a basic block to a label. */
224 make_label_edge (sbitmap
*edge_cache
, basic_block src
, rtx label
, int flags
)
226 if (GET_CODE (label
) != CODE_LABEL
)
229 /* If the label was never emitted, this insn is junk, but avoid a
230 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
231 as a result of a syntax error and a diagnostic has already been
234 if (INSN_UID (label
) == 0)
237 cached_make_edge (edge_cache
, src
, BLOCK_FOR_INSN (label
), flags
);
240 /* Create the edges generated by INSN in REGION. */
243 make_eh_edge (sbitmap
*edge_cache
, basic_block src
, rtx insn
)
245 int is_call
= GET_CODE (insn
) == CALL_INSN
? EDGE_ABNORMAL_CALL
: 0;
248 handlers
= reachable_handlers (insn
);
250 for (i
= handlers
; i
; i
= XEXP (i
, 1))
251 make_label_edge (edge_cache
, src
, XEXP (i
, 0),
252 EDGE_ABNORMAL
| EDGE_EH
| is_call
);
254 free_INSN_LIST_list (&handlers
);
257 /* Identify the edges between basic blocks MIN to MAX.
259 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
260 that are otherwise unreachable may be reachable with a non-local goto.
262 BB_EH_END is an array indexed by basic block number in which we record
263 the list of exception regions active at the end of the basic block. */
266 make_edges (rtx label_value_list
, basic_block min
, basic_block max
, int update_p
)
269 sbitmap
*edge_cache
= NULL
;
271 /* Assume no computed jump; revise as we create edges. */
272 current_function_has_computed_jump
= 0;
274 /* If we are partitioning hot and cold basic blocks into separate
275 sections, we cannot assume there is no computed jump. */
277 if (flag_reorder_blocks_and_partition
)
278 current_function_has_computed_jump
= 1;
280 /* Heavy use of computed goto in machine-generated code can lead to
281 nearly fully-connected CFGs. In that case we spend a significant
282 amount of time searching the edge lists for duplicates. */
283 if (forced_labels
|| label_value_list
|| cfun
->max_jumptable_ents
> 100)
285 edge_cache
= sbitmap_vector_alloc (last_basic_block
, last_basic_block
);
286 sbitmap_vector_zero (edge_cache
, last_basic_block
);
289 FOR_BB_BETWEEN (bb
, min
, max
->next_bb
, next_bb
)
293 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
294 if (e
->dest
!= EXIT_BLOCK_PTR
)
295 SET_BIT (edge_cache
[bb
->index
], e
->dest
->index
);
299 /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
300 is always the entry. */
301 if (min
== ENTRY_BLOCK_PTR
->next_bb
)
302 cached_make_edge (edge_cache
, ENTRY_BLOCK_PTR
, min
,
305 FOR_BB_BETWEEN (bb
, min
, max
->next_bb
, next_bb
)
309 int force_fallthru
= 0;
311 if (GET_CODE (BB_HEAD (bb
)) == CODE_LABEL
312 && LABEL_ALT_ENTRY_P (BB_HEAD (bb
)))
313 cached_make_edge (NULL
, ENTRY_BLOCK_PTR
, bb
, 0);
315 /* Examine the last instruction of the block, and discover the
316 ways we can leave the block. */
319 code
= GET_CODE (insn
);
322 if (code
== JUMP_INSN
)
326 /* Recognize exception handling placeholders. */
327 if (GET_CODE (PATTERN (insn
)) == RESX
)
328 make_eh_edge (edge_cache
, bb
, insn
);
330 /* Recognize a non-local goto as a branch outside the
332 else if (find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
335 /* Recognize a tablejump and do the right thing. */
336 else if (tablejump_p (insn
, NULL
, &tmp
))
341 if (GET_CODE (PATTERN (tmp
)) == ADDR_VEC
)
342 vec
= XVEC (PATTERN (tmp
), 0);
344 vec
= XVEC (PATTERN (tmp
), 1);
346 for (j
= GET_NUM_ELEM (vec
) - 1; j
>= 0; --j
)
347 make_label_edge (edge_cache
, bb
,
348 XEXP (RTVEC_ELT (vec
, j
), 0), 0);
350 /* Some targets (eg, ARM) emit a conditional jump that also
351 contains the out-of-range target. Scan for these and
352 add an edge if necessary. */
353 if ((tmp
= single_set (insn
)) != NULL
354 && SET_DEST (tmp
) == pc_rtx
355 && GET_CODE (SET_SRC (tmp
)) == IF_THEN_ELSE
356 && GET_CODE (XEXP (SET_SRC (tmp
), 2)) == LABEL_REF
)
357 make_label_edge (edge_cache
, bb
,
358 XEXP (XEXP (SET_SRC (tmp
), 2), 0), 0);
360 #ifdef CASE_DROPS_THROUGH
361 /* Silly VAXen. The ADDR_VEC is going to be in the way of
362 us naturally detecting fallthru into the next block. */
367 /* If this is a computed jump, then mark it as reaching
368 everything on the label_value_list and forced_labels list. */
369 else if (computed_jump_p (insn
))
371 current_function_has_computed_jump
= 1;
373 for (x
= label_value_list
; x
; x
= XEXP (x
, 1))
374 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
376 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
377 make_label_edge (edge_cache
, bb
, XEXP (x
, 0), EDGE_ABNORMAL
);
380 /* Returns create an exit out. */
381 else if (returnjump_p (insn
))
382 cached_make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, 0);
384 /* Otherwise, we have a plain conditional or unconditional jump. */
387 if (! JUMP_LABEL (insn
))
389 make_label_edge (edge_cache
, bb
, JUMP_LABEL (insn
), 0);
393 /* If this is a sibling call insn, then this is in effect a combined call
394 and return, and so we need an edge to the exit block. No need to
395 worry about EH edges, since we wouldn't have created the sibling call
396 in the first place. */
397 if (code
== CALL_INSN
&& SIBLING_CALL_P (insn
))
398 cached_make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
,
399 EDGE_SIBCALL
| EDGE_ABNORMAL
);
401 /* If this is a CALL_INSN, then mark it as reaching the active EH
402 handler for this CALL_INSN. If we're handling non-call
403 exceptions then any insn can reach any of the active handlers.
404 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
405 else if (code
== CALL_INSN
|| flag_non_call_exceptions
)
407 /* Add any appropriate EH edges. */
408 make_eh_edge (edge_cache
, bb
, insn
);
410 if (code
== CALL_INSN
&& nonlocal_goto_handler_labels
)
412 /* ??? This could be made smarter: in some cases it's possible
413 to tell that certain calls will not do a nonlocal goto.
414 For example, if the nested functions that do the nonlocal
415 gotos do not have their addresses taken, then only calls to
416 those functions or to other nested functions that use them
417 could possibly do nonlocal gotos. */
419 /* We do know that a REG_EH_REGION note with a value less
420 than 0 is guaranteed not to perform a non-local goto. */
421 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
423 if (!note
|| INTVAL (XEXP (note
, 0)) >= 0)
424 for (x
= nonlocal_goto_handler_labels
; x
; x
= XEXP (x
, 1))
425 make_label_edge (edge_cache
, bb
, XEXP (x
, 0),
426 EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
);
430 /* Find out if we can drop through to the next block. */
431 insn
= NEXT_INSN (insn
);
433 && GET_CODE (insn
) == NOTE
434 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BASIC_BLOCK
)
435 insn
= NEXT_INSN (insn
);
437 if (!insn
|| (bb
->next_bb
== EXIT_BLOCK_PTR
&& force_fallthru
))
438 cached_make_edge (edge_cache
, bb
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
439 else if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
441 if (force_fallthru
|| insn
== BB_HEAD (bb
->next_bb
))
442 cached_make_edge (edge_cache
, bb
, bb
->next_bb
, EDGE_FALLTHRU
);
447 sbitmap_vector_free (edge_cache
);
450 /* Find all basic blocks of the function whose first insn is F.
452 Collect and return a list of labels whose addresses are taken. This
453 will be used in make_edges for use with computed gotos. */
456 find_basic_blocks_1 (rtx f
)
459 rtx bb_note
= NULL_RTX
;
464 basic_block prev
= ENTRY_BLOCK_PTR
;
466 /* We process the instructions in a slightly different way than we did
467 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
468 closed out the previous block, so that it gets attached at the proper
469 place. Since this form should be equivalent to the previous,
470 count_basic_blocks continues to use the old form as a check. */
472 for (insn
= f
; insn
; insn
= next
)
474 enum rtx_code code
= GET_CODE (insn
);
476 next
= NEXT_INSN (insn
);
478 if ((GET_CODE (insn
) == CODE_LABEL
|| GET_CODE (insn
) == BARRIER
)
481 prev
= create_basic_block_structure (head
, end
, bb_note
, prev
);
482 head
= end
= NULL_RTX
;
486 if (inside_basic_block_p (insn
))
488 if (head
== NULL_RTX
)
493 if (head
&& control_flow_insn_p (insn
))
495 prev
= create_basic_block_structure (head
, end
, bb_note
, prev
);
496 head
= end
= NULL_RTX
;
504 int kind
= NOTE_LINE_NUMBER (insn
);
506 /* Look for basic block notes with which to keep the
507 basic_block_info pointers stable. Unthread the note now;
508 we'll put it back at the right place in create_basic_block.
509 Or not at all if we've already found a note in this block. */
510 if (kind
== NOTE_INSN_BASIC_BLOCK
)
512 if (bb_note
== NULL_RTX
)
515 next
= delete_insn (insn
);
527 if (GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
529 /* Scan each of the alternatives for label refs. */
530 lvl
= find_label_refs (XEXP (PATTERN (insn
), 0), lvl
);
531 lvl
= find_label_refs (XEXP (PATTERN (insn
), 1), lvl
);
532 lvl
= find_label_refs (XEXP (PATTERN (insn
), 2), lvl
);
533 /* Record its tail recursion label, if any. */
534 if (XEXP (PATTERN (insn
), 3) != NULL_RTX
)
535 trll
= alloc_EXPR_LIST (0, XEXP (PATTERN (insn
), 3), trll
);
543 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
547 /* Make a list of all labels referred to other than by jumps.
549 Make a special exception for labels followed by an ADDR*VEC,
550 as this would be a part of the tablejump setup code.
552 Make a special exception to registers loaded with label
553 values just before jump insns that use them. */
555 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
556 if (REG_NOTE_KIND (note
) == REG_LABEL
)
558 rtx lab
= XEXP (note
, 0), next
;
560 if ((next
= next_nonnote_insn (lab
)) != NULL
561 && GET_CODE (next
) == JUMP_INSN
562 && (GET_CODE (PATTERN (next
)) == ADDR_VEC
563 || GET_CODE (PATTERN (next
)) == ADDR_DIFF_VEC
))
565 else if (GET_CODE (lab
) == NOTE
)
567 else if (GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
568 && find_reg_note (NEXT_INSN (insn
), REG_LABEL
, lab
))
571 lvl
= alloc_EXPR_LIST (0, XEXP (note
, 0), lvl
);
576 if (head
!= NULL_RTX
)
577 create_basic_block_structure (head
, end
, bb_note
, prev
);
579 delete_insn (bb_note
);
581 if (last_basic_block
!= n_basic_blocks
)
584 label_value_list
= lvl
;
585 tail_recursion_label_list
= trll
;
586 clear_aux_for_blocks ();
590 /* Find basic blocks of the current function.
591 F is the first insn of the function and NREGS the number of register
595 find_basic_blocks (rtx f
, int nregs ATTRIBUTE_UNUSED
,
596 FILE *file ATTRIBUTE_UNUSED
)
600 timevar_push (TV_CFG
);
602 /* Flush out existing data. */
603 if (basic_block_info
!= NULL
)
607 /* Clear bb->aux on all extant basic blocks. We'll use this as a
608 tag for reuse during create_basic_block, just in case some pass
609 copies around basic block notes improperly. */
613 VARRAY_FREE (basic_block_info
);
616 n_basic_blocks
= count_basic_blocks (f
);
617 last_basic_block
= 0;
618 ENTRY_BLOCK_PTR
->next_bb
= EXIT_BLOCK_PTR
;
619 EXIT_BLOCK_PTR
->prev_bb
= ENTRY_BLOCK_PTR
;
621 /* Size the basic block table. The actual structures will be allocated
622 by find_basic_blocks_1, since we want to keep the structure pointers
623 stable across calls to find_basic_blocks. */
624 /* ??? This whole issue would be much simpler if we called find_basic_blocks
625 exactly once, and thereafter we don't have a single long chain of
626 instructions at all until close to the end of compilation when we
627 actually lay them out. */
629 VARRAY_BB_INIT (basic_block_info
, n_basic_blocks
, "basic_block_info");
631 find_basic_blocks_1 (f
);
633 /* Discover the edges of our cfg. */
634 make_edges (label_value_list
, ENTRY_BLOCK_PTR
->next_bb
, EXIT_BLOCK_PTR
->prev_bb
, 0);
636 /* Do very simple cleanup now, for the benefit of code that runs between
637 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
638 tidy_fallthru_edges ();
640 #ifdef ENABLE_CHECKING
643 timevar_pop (TV_CFG
);
/* State of basic block as seen by find_sub_basic_blocks.  The state is
   stashed in bb->aux via the macros below.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
652 /* Scan basic block BB for possible BB boundaries inside the block
653 and create new basic blocks in the progress. */
656 find_bb_boundaries (basic_block bb
)
658 rtx insn
= BB_HEAD (bb
);
659 rtx end
= BB_END (bb
);
660 rtx flow_transfer_insn
= NULL_RTX
;
661 edge fallthru
= NULL
;
663 if (insn
== BB_END (bb
))
666 if (GET_CODE (insn
) == CODE_LABEL
)
667 insn
= NEXT_INSN (insn
);
669 /* Scan insn chain and try to find new basic block boundaries. */
672 enum rtx_code code
= GET_CODE (insn
);
674 /* On code label, split current basic block. */
675 if (code
== CODE_LABEL
)
677 fallthru
= split_block (bb
, PREV_INSN (insn
));
678 if (flow_transfer_insn
)
679 BB_END (bb
) = flow_transfer_insn
;
682 remove_edge (fallthru
);
683 flow_transfer_insn
= NULL_RTX
;
684 if (LABEL_ALT_ENTRY_P (insn
))
685 make_edge (ENTRY_BLOCK_PTR
, bb
, 0);
688 /* In case we've previously seen an insn that effects a control
689 flow transfer, split the block. */
690 if (flow_transfer_insn
&& inside_basic_block_p (insn
))
692 fallthru
= split_block (bb
, PREV_INSN (insn
));
693 BB_END (bb
) = flow_transfer_insn
;
695 remove_edge (fallthru
);
696 flow_transfer_insn
= NULL_RTX
;
699 if (control_flow_insn_p (insn
))
700 flow_transfer_insn
= insn
;
703 insn
= NEXT_INSN (insn
);
706 /* In case expander replaced normal insn by sequence terminating by
707 return and barrier, or possibly other sequence not behaving like
708 ordinary jump, we need to take care and move basic block boundary. */
709 if (flow_transfer_insn
)
710 BB_END (bb
) = flow_transfer_insn
;
712 /* We've possibly replaced the conditional jump by conditional jump
713 followed by cleanup at fallthru edge, so the outgoing edges may
715 purge_dead_edges (bb
);
718 /* Assume that frequency of basic block B is known. Compute frequencies
719 and probabilities of outgoing edges. */
722 compute_outgoing_frequencies (basic_block b
)
726 if (b
->succ
&& b
->succ
->succ_next
&& !b
->succ
->succ_next
->succ_next
)
728 rtx note
= find_reg_note (BB_END (b
), REG_BR_PROB
, NULL
);
734 probability
= INTVAL (XEXP (note
, 0));
736 e
->probability
= probability
;
737 e
->count
= ((b
->count
* probability
+ REG_BR_PROB_BASE
/ 2)
739 f
= FALLTHRU_EDGE (b
);
740 f
->probability
= REG_BR_PROB_BASE
- probability
;
741 f
->count
= b
->count
- e
->count
;
744 if (b
->succ
&& !b
->succ
->succ_next
)
747 e
->probability
= REG_BR_PROB_BASE
;
752 /* Assume that someone emitted code with control flow instructions to the
753 basic block. Update the data structure. */
756 find_many_sub_basic_blocks (sbitmap blocks
)
758 basic_block bb
, min
, max
;
762 TEST_BIT (blocks
, bb
->index
) ? BLOCK_TO_SPLIT
: BLOCK_ORIGINAL
);
765 if (STATE (bb
) == BLOCK_TO_SPLIT
)
766 find_bb_boundaries (bb
);
769 if (STATE (bb
) != BLOCK_ORIGINAL
)
773 for (; bb
!= EXIT_BLOCK_PTR
; bb
= bb
->next_bb
)
774 if (STATE (bb
) != BLOCK_ORIGINAL
)
777 /* Now re-scan and wire in all edges. This expect simple (conditional)
778 jumps at the end of each new basic blocks. */
779 make_edges (NULL
, min
, max
, 1);
781 /* Update branch probabilities. Expect only (un)conditional jumps
782 to be created with only the forward edges. */
783 FOR_BB_BETWEEN (bb
, min
, max
->next_bb
, next_bb
)
787 if (STATE (bb
) == BLOCK_ORIGINAL
)
789 if (STATE (bb
) == BLOCK_NEW
)
793 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
795 bb
->count
+= e
->count
;
796 bb
->frequency
+= EDGE_FREQUENCY (e
);
800 compute_outgoing_frequencies (bb
);
807 /* Like above but for single basic block only. */
810 find_sub_basic_blocks (basic_block bb
)
812 basic_block min
, max
, b
;
813 basic_block next
= bb
->next_bb
;
816 find_bb_boundaries (bb
);
819 /* Now re-scan and wire in all edges. This expect simple (conditional)
820 jumps at the end of each new basic blocks. */
821 make_edges (NULL
, min
, max
, 1);
823 /* Update branch probabilities. Expect only (un)conditional jumps
824 to be created with only the forward edges. */
825 FOR_BB_BETWEEN (b
, min
, max
->next_bb
, next_bb
)
833 for (e
= b
->pred
; e
; e
= e
->pred_next
)
835 b
->count
+= e
->count
;
836 b
->frequency
+= EDGE_FREQUENCY (e
);
840 compute_outgoing_frequencies (b
);