/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */
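/* The driver is find_basic_blocks: it counts the blocks to come
   (count_basic_blocks), carves the insn chain up into them
   (find_basic_blocks_1), and then wires the edges in (make_edges).
   find_sub_basic_blocks and find_many_sub_basic_blocks redo that work
   locally after a pass has emitted new control flow inside existing
   blocks.  */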
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
#include "obstack.h"
static int count_basic_blocks		  PARAMS ((rtx));
static void find_basic_blocks_1		  PARAMS ((rtx));
static rtx find_label_refs		  PARAMS ((rtx, rtx));
static void make_edges			  PARAMS ((rtx, int, int, int));
static void make_label_edge		  PARAMS ((sbitmap *, basic_block,
						   rtx, int));
static void make_eh_edge		  PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries		  PARAMS ((basic_block));
static void compute_outgoing_frequencies  PARAMS ((basic_block));
static bool inside_basic_block_p	  PARAMS ((rtx));
static bool control_flow_insn_p		  PARAMS ((rtx));
/* Return true if INSN is something that should be contained inside a
   basic block.  */

static bool
inside_basic_block_p (insn)
     rtx insn;
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jumptables.  */
      if (NEXT_INSN (insn)
	  && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	  && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	      || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	return false;
      return true;

    case JUMP_INSN:
      /* Jump tables themselves live outside basic blocks as well.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	return false;
      return true;

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}
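/* Note that a CODE_LABEL immediately followed by its dispatch table
   (a JUMP_INSN whose pattern is an ADDR_VEC or ADDR_DIFF_VEC) is thus
   kept outside of any basic block, as is the table itself;
   find_basic_blocks_1 and find_bb_boundaries rely on this when carving
   up the insn chain.  */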
/* Return true if INSN may cause a control flow transfer, so
   it should be last in the basic block.  */

static bool
control_flow_insn_p (insn)
     rtx insn;
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for tablejumps.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	return false;
      return true;

    case CALL_INSN:
      /* A call insn may return to a nonlocal goto handler.  */
      if (nonlocal_goto_handler_labels
	  && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
	      || INTVAL (XEXP (note, 0)) >= 0))
	return true;

      /* Or it may trap.  */
      return can_throw_internal (insn);

    case INSN:
      return (flag_non_call_exceptions
	      && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the
	 end of a basic block, but before dead code is eliminated
	 this may happen.  */
      return false;

    default:
      abort ();
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
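/* For example, a function body of the form

	CODE_LABEL; INSN; JUMP_INSN (conditional); INSN;
	JUMP_INSN (return); BARRIER

   counts as two basic blocks: the conditional jump terminates the
   first and the return jump terminates the second, while the BARRIER
   arrives with SAW_INSN already false and so adds nothing.  */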
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
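/* Every edge created here is abnormal and an EH edge; when the source
   insn is a call it is additionally flagged EDGE_ABNORMAL_CALL.  The
   list returned by reachable_handlers is a freshly allocated INSN_LIST,
   which is why it is freed above.  */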
/* Identify the edges between basic blocks MIN to MAX.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     int min, max, update_p;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
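      /* With the cache in place, cached_make_edge can reject a duplicate
	 edge with one bit test per (source, destination) pair instead of
	 walking the source block's successor list.  */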
      if (update_p)
	for (i = min; i <= max; ++i)
	  {
	    edge e;

	    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[i], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  if (min == 0)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0),
		      EDGE_FALLTHRU);
  for (i = min; i <= max; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL
	  && LABEL_ALTERNATE_NAME (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);
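	      /* An ADDR_VEC keeps its label refs in operand 0, while an
		 ADDR_DIFF_VEC keeps its base label in operand 0 and the
		 label refs in operand 1; that is why the vector is
		 fetched from a different slot above.  */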
	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }
	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }
	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();

	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}
      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}
      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);

	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);

	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, BASIC_BLOCK (i + 1),
			      EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken (recorded in
   label_value_list); it will be used in make_edges for use with
   computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  int i = 0;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  create_basic_block_structure (i++, head, end, bb_note);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;

	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  create_basic_block_structure (i++, head, end, bb_note);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);

	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
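	  /* The CALL_PLACEHOLDER pattern carries the alternative
	     expansions of the call as insn sequences in operands 0-2
	     and the tail recursion label, if any, in operand 3; only
	     their label references matter here.  */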
	  break;

	default:
	  break;
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }
  if (head != NULL_RTX)
    create_basic_block_structure (i++, head, end, bb_note);
  else if (bb_note)
    delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  timevar_push (TV_CFG);

  basic_block_for_insn = 0;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, 0, n_basic_blocks - 1, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_CFG);
}
/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(bb) ((enum state) (size_t) (bb)->aux)
#define SET_STATE(bb, state) ((bb)->aux = (void *) (size_t) (state))
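/* find_many_sub_basic_blocks below tags every existing block as either
   BLOCK_TO_SPLIT or BLOCK_ORIGINAL before splitting; blocks created
   while splitting keep a zero aux field and so read back as BLOCK_NEW
   through STATE.  */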
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALTERNATE_NAME (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;

      if (insn == end)
	break;

      insn = NEXT_INSN (insn);
    }
  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and barrier, or possibly some other sequence not behaving
     like an ordinary jump, we need to take care and move the basic block
     boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (b)
     basic_block b;
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
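      /* REG_BR_PROB_BASE is 10000, so e.g. a REG_BR_PROB note of 6000 on
	 a block executed 100 times gives the branch edge a count of
	 (100 * 6000 + 5000) / 10000 = 60 and leaves 40 for the fallthru
	 edge.  */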
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}
/* Assume that someone emitted code with control flow instructions into the
   basic blocks marked in BLOCKS.  Update the data structure.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  int i;
  int min, max;

  for (i = 0; i < n_basic_blocks; i++)
    SET_STATE (BASIC_BLOCK (i),
	       TEST_BIT (blocks, i) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);

      if (STATE (bb) == BLOCK_TO_SPLIT)
	find_bb_boundaries (bb);
    }
  for (i = 0; i < n_basic_blocks; i++)
    if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
      break;

  min = max = i;
  for (; i < n_basic_blocks; i++)
    if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
      max = i;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);
  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  for (i = min; i <= max; i++)
    {
      edge e;
      basic_block b = BASIC_BLOCK (i);

      if (STATE (b) == BLOCK_ORIGINAL)
	continue;

      if (STATE (b) == BLOCK_NEW)
	{
	  b->count = 0;
	  b->frequency = 0;

	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }

  for (i = 0; i < n_basic_blocks; i++)
    SET_STATE (BASIC_BLOCK (i), 0);
}
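/* A hypothetical caller that has expanded insns with embedded control
   flow inside several existing blocks would collect those blocks in an
   sbitmap and repair the CFG in one go, e.g.

	sbitmap blocks = sbitmap_alloc (n_basic_blocks);

	sbitmap_zero (blocks);
	SET_BIT (blocks, bb->index);
	...
	find_many_sub_basic_blocks (blocks);
	sbitmap_free (blocks);

   find_sub_basic_blocks below is the single-block variant.  */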
/* Like above, but for a single basic block only.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  int i;
  int min, max;
  basic_block next = (bb->index == n_basic_blocks - 1
		      ? NULL : BASIC_BLOCK (bb->index + 1));

  min = bb->index;
  find_bb_boundaries (bb);
  max = (next ? next->index : n_basic_blocks) - 1;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  for (i = min; i <= max; i++)
    {
      edge e;
      basic_block b = BASIC_BLOCK (i);

      if (i != min)
	{
	  b->count = 0;
	  b->frequency = 0;

	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}