/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
	 find_basic_blocks  */
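
/* Typical usage (a sketch, not prescribed by this file): a pass that
   needs the CFG rebuilt from scratch calls
   find_basic_blocks (get_insns ()) on the current function's insn
   chain, while a pass that has inserted labels or control flow insns
   into existing blocks instead calls find_many_sub_basic_blocks with a
   bitmap of the blocks to re-examine.  */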
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
static int count_basic_blocks (const_rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
/* Return true if insn is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const_rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jumptables.  */
      return (NEXT_INSN (insn) == 0
              || !JUMP_P (NEXT_INSN (insn))
              || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
                  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}
/* Return true if INSN may cause a control flow transfer, so it should be
   last in the basic block.  */

bool
control_flow_insn_p (const_rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
         tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic block
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;

      /* A call insn may return to a nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
               && (0 == (note = find_reg_note (insn, REG_EH_REGION,
                                               NULL_RTX))
                   || INTVAL (XEXP (note, 0)) >= 0))
              /* Or may trap.  */
              || can_throw_internal (insn));

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;

      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
         basic block, but before dead code is eliminated this may
         happen.  */
      return false;

    default:
      gcc_unreachable ();
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (const_rtx f)
{
  int count = NUM_FIXED_BLOCKS;
  bool saw_insn = false;
  const_rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
         terminated at the previous real insn.  */
      if ((LABEL_P (insn) || BARRIER_P (insn))
          && saw_insn)
        count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
        saw_insn = true;

      /* A control flow insn causes the current basic block to be
         terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
        count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic
     blocks.  */
  if (count == NUM_FIXED_BLOCKS)
    {
      emit_use (const0_rtx);
      count = NUM_FIXED_BLOCKS + 1;
    }

  return count;
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */
  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the exception-handling edges generated by INSN, from SRC to
   each handler reachable from INSN.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
                     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc.) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP		32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
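
/* The state is stashed directly in each block's otherwise-unused aux
   pointer.  The enum values fit in the low bits, so the tablejump flag
   (value 32) can be ORed in alongside them; FULL_STATE reads back the
   raw value, while STATE is only meaningful when the flag is clear.  */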
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if we
   are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb
     is always the entry block.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          sbitmap_zero (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR)
                  SET_BIT (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* Recognize exception handling placeholders.  */
          if (GET_CODE (PATTERN (insn)) == RESX)
            rtl_make_eh_edge (edge_cache, bb, insn);

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &tmp))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g., ARM) emit a conditional jump that
                 also contains the out-of-range target.  Scan for these
                 and add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* Return insns create an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Otherwise, we have a plain conditional or unconditional
             jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined
         call and return, and so we need an edge to the exit block.  No
         need to worry about EH edges, since we wouldn't have created the
         sibling call in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's
                 possible to tell that certain calls will not do a
                 nonlocal goto.  For example, if the nested functions that
                 do the nonlocal gotos do not have their addresses taken,
                 then only calls to those functions or to other nested
                 functions that use them could possibly do nonlocal
                 gotos.  */

              /* We do know that a REG_EH_REGION note with a value less
                 than 0 is guaranteed not to perform a non-local goto.  */
              rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

              if (!note || INTVAL (XEXP (note, 0)) >= 0)
                for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((LABEL_P (insn) || BARRIER_P (insn))
          && head)
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      if (inside_basic_block_p (insn))
        {
          if (head == NULL_RTX)
            head = insn;
          end = insn;
        }

      if (head && control_flow_insn_p (insn))
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      switch (code)
        {
        case NOTE:
          /* Look for basic block notes with which to keep the
             basic_block_info pointers stable.  Unthread the note now;
             we'll put it back at the right place in create_basic_block.
             Or not at all if we've already found a note in this block.  */
          if (NOTE_INSN_BASIC_BLOCK_P (insn))
            {
              if (bb_note == NULL_RTX)
                bb_note = insn;
              else
                next = delete_insn (insn);
            }
          break;

        case CODE_LABEL:
        case JUMP_INSN:
        case CALL_INSN:
        case INSN:
        case BARRIER:
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  gcc_assert (last_basic_block == n_basic_blocks);

  clear_aux_for_blocks ();
}
/* Find basic blocks of the current function.
   F is the first insn of the function.  */

void
find_basic_blocks (rtx f)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
         tag for reuse during create_basic_block, just in case some pass
         copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
        bb->aux = NULL;

      basic_block_info = NULL;
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = NUM_FIXED_BLOCKS;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called
     find_basic_blocks exactly once, and thereafter we don't have a single
     long chain of instructions at all until close to the end of
     compilation when we actually lay them out.  */

  basic_block_info = VEC_alloc (basic_block, gc, n_basic_blocks);
  VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);

  find_basic_blocks_1 (f);

  profile_status = PROFILE_ABSENT;

  /* Tell make_edges to examine every block for out-going edges.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, BLOCK_NEW);

  /* Discover the edges of our cfg.  */
  make_edges (ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
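
/* Mark the basic block containing LABEL as reached by a tablejump, by
   ORing BLOCK_USED_BY_TABLEJUMP into the block's state.  */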
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}
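
/* Remove outgoing edges of BB that the tablejump using dispatch TABLE
   can no longer take: mark every target listed in TABLE (and any
   out-of-range target embedded in the jump itself), then delete each
   unmarked successor edge that is neither abnormal nor EH.  */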
static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and mark the
     edge as needed.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                            & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb), x;
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            {
              BB_END (bb) = flow_transfer_insn;

              /* Clean up the bb field for the insns between the blocks.  */
              for (x = NEXT_INSN (flow_transfer_insn);
                   x != BB_HEAD (fallthru->dest);
                   x = NEXT_INSN (x))
                if (!BARRIER_P (x))
                  set_block_for_insn (x, NULL);
            }

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR, bb, 0);
        }

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          BB_END (bb) = flow_transfer_insn;

          /* Clean up the bb field for the insns between the blocks.  */
          for (x = NEXT_INSN (flow_transfer_insn);
               x != BB_HEAD (fallthru->dest);
               x = NEXT_INSN (x))
            if (!BARRIER_P (x))
              set_block_for_insn (x, NULL);

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence that ends
     in a return and a barrier, or some other sequence that does not
     behave like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (flow_transfer_insn)
    {
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      x = flow_transfer_insn;
      while (x != end)
        {
          x = NEXT_INSN (x);
          if (!BARRIER_P (x))
            set_block_for_insn (x, NULL);
        }
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}
/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */
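
/* Worked example: with REG_BR_PROB_BASE equal to 10000, a REG_BR_PROB
   note of 9000 on a two-successor block gives the branch edge
   probability 9000 (90%) and a count of 9/10 of b->count (rounded);
   the fallthru edge gets probability 1000 and the remaining count.  */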
static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = INTVAL (XEXP (note, 0));
          e = BRANCH_EDGE (b);
          e->probability = probability;
          e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
                      / REG_BR_PROB_BASE);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
          return;
        }
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }

  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
                  / REG_BR_PROB_BASE);
}
/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
               TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bb->count = 0;
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                bb->count += e->count;
                bb->frequency += EDGE_FREQUENCY (e);
              }
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}