/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
	 find_basic_blocks
     - Local CFG construction
	 find_sub_basic_blocks  */
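/* Illustrative only: a typical caller (outside this file) builds the CFG
   for the current function roughly like

       find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);

   and, after emitting new control flow into existing blocks, repairs it
   with find_sub_basic_blocks or find_many_sub_basic_blocks.  The exact
   call sites live elsewhere (e.g. toplev.c and the RTL passes); this
   sketch only assumes the usual get_insns () / max_reg_num () accessors
   and the rtl_dump_file stream.  */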
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
static int count_basic_blocks PARAMS ((rtx));
static void find_basic_blocks_1 PARAMS ((rtx));
static rtx find_label_refs PARAMS ((rtx, rtx));
static void make_edges PARAMS ((rtx, basic_block, basic_block, int));
static void make_label_edge PARAMS ((sbitmap *, basic_block, rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
static bool inside_basic_block_p PARAMS ((rtx));
static bool control_flow_insn_p PARAMS ((rtx));
/* Return true if INSN is something that should be contained inside a
   basic block.  */

static bool
inside_basic_block_p (insn)
     rtx insn;
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}
/* Return true if INSN may cause a control flow transfer, so it should be
   last in the basic block.  */

static bool
control_flow_insn_p (insn)
     rtx insn;
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer except for tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* A call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
	 basic block, but before dead code is eliminated this may happen.  */
      return false;

    default:
      abort ();
    }
}
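/* For example (illustrative): a CALL_INSN carrying a REG_EH_REGION note
   with a negative value cannot reach a nonlocal goto handler, so it ends
   its block only if can_throw_internal reports that it may throw.  */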
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
	 terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
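/* Worked example (illustrative): for an insn stream of

       CODE_LABEL, INSN, conditional JUMP_INSN, INSN, BARRIER

   the walk above terminates one block at the conditional jump and a
   second one at the barrier, so count_basic_blocks returns 2.  */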
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken other
   than by jumps; blocks that are otherwise unreachable may be reachable
   from them via a computed goto.  If UPDATE_P is nonzero, the existing
   edges of the blocks are entered into the edge cache so they are not
   created twice.  */
static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     basic_block min, max;
     int update_p;
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
	FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
		      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  rtx tmp = bb->next_bb->head;
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
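/* To summarize the cases above: make_edges adds fallthru edges between
   adjacent blocks, ordinary edges for (conditional) jumps and tablejump
   targets, EDGE_ABNORMAL edges from computed gotos to every address-taken
   label, exit edges for returns and sibling calls, and EDGE_EH /
   EDGE_ABNORMAL_CALL edges for insns that may throw or reach a nonlocal
   goto handler.  */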
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */
static void
find_basic_blocks_1 (f)
     rtx f;
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;

  clear_aux_for_blocks ();
}
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
	      EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
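/* For instance, SET_STATE (bb, BLOCK_TO_SPLIT) stashes the enum value in
   bb->aux and STATE (bb) reads it back; find_many_sub_basic_blocks clears
   the field again before returning.  */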
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == bb->end)
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    bb->end = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  bb->end = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to move the basic block
     boundary.  */
  if (flow_transfer_insn)
    bb->end = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (b)
     basic_block b;
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}
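/* Worked example (illustrative): with REG_BR_PROB_BASE == 10000, a block
   whose count is 1000 and whose final jump carries a REG_BR_PROB note of
   4000 gives its branch edge probability 4000 (40%) and count
   (1000 * 4000 + 5000) / 10000 == 400, while the fallthru edge gets
   probability 6000 and count 600.  */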
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  */

void
find_many_sub_basic_blocks (blocks)
     sbitmap blocks;
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
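/* A caller that has just emitted control flow into some existing blocks
   might use this roughly as follows (illustrative sketch only; the sbitmap
   helpers are the ones declared in sbitmap.h):

       sbitmap blocks = sbitmap_alloc (last_basic_block);
       sbitmap_zero (blocks);
       SET_BIT (blocks, bb->index);   -- for each block that was touched
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);
*/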
/* Like above but for a single basic block only.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
	{
	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}