/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */

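/* A rough usage sketch (illustration only -- the exact call sites and
   arguments vary between passes and compiler versions):

     find_basic_blocks (get_insns (), max_reg_num (), NULL);
     cleanup_cfg (CLEANUP_EXPENSIVE);

   find_basic_blocks rebuilds basic_block_info and all edges from scratch;
   find_sub_basic_blocks and find_many_sub_basic_blocks only re-split
   blocks into which new control flow insns have been emitted.  */
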
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "timevar.h"

static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, basic_block,
						 basic_block, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries		PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));

/* Return true if insn is something that should be contained inside a basic
   block.  */

bool
inside_basic_block_p (insn)
     rtx insn;
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      /* A jump table body (ADDR_VEC or ADDR_DIFF_VEC) is not part of
	 any basic block.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}

/* Return true if INSN may cause a control flow transfer, so it should be
   last in the basic block.  */

bool
control_flow_insn_p (insn)
     rtx insn;
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes control transfer except for tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* A call insn may return to the nonlocal goto handler, or may
	 throw.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the
	 end of a basic block, but before dead code is eliminated
	 this can happen.  */
      return false;

    default:
      abort ();
    }
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be
	 terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

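/* For example, an insn stream of the form

     CODE_LABEL L1; INSN a; JUMP_INSN j; BARRIER; CODE_LABEL L2; INSN b

   is counted as two basic blocks: the jump J ends the first block
   (L1, A, J), and B starts and ends the second.  The BARRIER and L2
   would likewise have ended the first block had J not already done so.  */
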
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception-handling edges generated by INSN.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

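/* Note: reachable_handlers returns an INSN_LIST of the handler labels that
   INSN may transfer control to, so every edge made above is abnormal and
   tagged EDGE_EH, plus EDGE_ABNORMAL_CALL when INSN is a call.  */
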
/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; together
   with forced_labels it gives the possible targets of computed jumps, so
   blocks that are otherwise unreachable may still be reachable.

   If UPDATE_P is nonzero, we are updating an existing CFG and the edges
   already recorded on the blocks are entered into the duplicate-edge
   cache.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     basic_block min, max;
     int update_p;
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

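  /* The cache is a dense bitmap of known successors indexed by source block;
     it lets cached_make_edge test cheaply whether an edge might already
     exist before walking the successor list.  */
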
  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb
     is always the entry block.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
		      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &tmp))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  rtx tmp = bb->next_bb->head;
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken (in label_value_list).
   This will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3),
					trll);
	    }
	  break;

	default:
	  break;
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}

/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
	      EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}

/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

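/* The state is stashed in BB->aux for the duration of
   find_many_sub_basic_blocks; e.g. SET_STATE (bb, BLOCK_TO_SPLIT) marks a
   block whose body must be re-scanned for new boundaries, and STATE (bb)
   reads the mark back.  */
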
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated by
     a return and a barrier, or possibly by some other sequence not behaving
     like an ordinary jump, we need to take care and move the basic block
     boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}

/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (b)
     basic_block b;
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      e->frequency = b->frequency;
    }
}

/* Assume that someone emitted code with control flow instructions into the
   basic blocks marked in BLOCKS.  Update the CFG data structure to match.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}

/* Like above but for a single basic block only.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
	{
	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}