/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"
static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static rtx find_label_refs (rtx, rtx);
static void make_edges (rtx, basic_block, basic_block, int);
static void make_label_edge (sbitmap *, basic_block, rtx, int);
static void make_eh_edge (sbitmap *, basic_block, rtx);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);
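
/* A rough sketch of how the pieces below fit together:

     n_basic_blocks = count_basic_blocks (f);    (first pass: just count)
     find_basic_blocks_1 (f);                    (second pass: build the blocks)
     make_edges (label_value_list, ...);         (third pass: wire in the edges)

   find_bb_boundaries and its wrappers find_sub_basic_blocks and
   find_many_sub_basic_blocks repeat the same boundary scan locally,
   for blocks into which a later pass has inserted new control flow.  */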
/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
              || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
              || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
                  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}
/* Return true if INSN may cause a control flow transfer, so it should be
   the last insn in a basic block.  */

bool
control_flow_insn_p (rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
         tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* A call insn may return to a nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
               && (0 == (note = find_reg_note (insn, REG_EH_REGION,
                                               NULL_RTX))
                   || INTVAL (XEXP (note, 0)) >= 0))
              /* Or may trap.  */
              || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
         basic block, but before dead code is eliminated this may
         happen.  */
      return false;

    default:
      abort ();
    }
}
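
/* Taken together, the two predicates above define basic block boundaries:
   a block is a maximal run of insns for which inside_basic_block_p holds,
   and it is cut short whenever control_flow_insn_p holds for an insn
   (which then becomes the block's last insn) or a label or barrier is
   reached.  */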
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (rtx f)
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
         terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
          && saw_insn)
        count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
        saw_insn = true;

      /* A control flow insn terminates the current basic block.  */
      if (saw_insn && control_flow_insn_p (insn))
        count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit more smoothly when we don't have
     to check for the edge case of do-nothing functions with no basic
     blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
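
/* For example, a function whose body contains no real insns at all (only
   notes) would yield count == 0; the (use (const_int 0)) emitted above
   gives such a do-nothing function a single dummy block to hang the
   entry and exit edges on.  */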
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (rtx f, rtx lvl)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
        rtx note;

        /* Make a list of all labels referred to other than by jumps
           (which just don't have the REG_LABEL notes).

           Make a special exception for labels followed by an ADDR*VEC,
           as this would be a part of the tablejump setup code.

           Make a special exception for registers loaded with label
           values just before jump insns that use them.  */

        for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
          if (REG_NOTE_KIND (note) == REG_LABEL)
            {
              rtx lab = XEXP (note, 0), next;

              if ((next = next_nonnote_insn (lab)) != NULL
                  && GET_CODE (next) == JUMP_INSN
                  && (GET_CODE (PATTERN (next)) == ADDR_VEC
                      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
                ;
              else if (GET_CODE (lab) == NOTE)
                ;
              else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
                       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
                ;
              else
                lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
            }
      }

  return lvl;
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
                     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify the edges between basic blocks MIN and MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; a block
   ending in a computed jump gets an abnormal edge to every label on this
   list (and on forced_labels), since its real targets cannot be known.

   If UPDATE_P is nonzero, the blocks between MIN and MAX already have
   edges; seed the duplicate-edge cache with them rather than assuming a
   clean slate.  */

static void
make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
        FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
          {
            edge e;

            for (e = bb->succ; e; e = e->succ_next)
              if (e->dest != EXIT_BLOCK_PTR)
                SET_BIT (edge_cache[bb->index], e->dest->index);
          }
    }
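
  /* From here on, cached_make_edge consults EDGE_CACHE (when it is
     non-null), so that repeated edges to the same target, such as those
     from a computed goto to every label on LABEL_VALUE_LIST, are rejected
     by a bitmap test rather than by walking the successor list each
     time.  */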
  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb is
     always the first real block of the function, so give it the fallthru
     edge from the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
                      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* Recognize exception handling placeholders.  */
          if (GET_CODE (PATTERN (insn)) == RESX)
            make_eh_edge (edge_cache, bb, insn);

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &tmp))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g. ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
              /* Silly VAXen.  The ADDR_VEC is going to be in the way of
                 us naturally detecting fallthru into the next block.  */
              force_fallthru = 1;
#endif
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the label_value_list and forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              current_function_has_computed_jump = 1;

              for (x = label_value_list; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* A return creates an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              if (! JUMP_LABEL (insn))
                abort ();
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined
         call and return, and so we need an edge to the exit block.  No need
         to worry about EH edges, since we wouldn't have created the sibling
         call in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's possible
                 to tell that certain calls will not do a nonlocal goto.
                 For example, if the nested functions that do the nonlocal
                 gotos do not have their addresses taken, then only calls to
                 those functions or to other nested functions that use them
                 could possibly do nonlocal gotos.  */

              /* We do know that a REG_EH_REGION note with a value less
                 than 0 is guaranteed not to perform a non-local goto.  */
              rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

              if (!note || INTVAL (XEXP (note, 0)) >= 0)
                for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      while (insn
             && GET_CODE (insn) == NOTE
             && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
        {
          if (force_fallthru || insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
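
/* make_edges has two kinds of callers: find_basic_blocks passes the global
   label_value_list with UPDATE_P == 0 for a full rebuild, while
   find_sub_basic_blocks and find_many_sub_basic_blocks pass NULL and
   UPDATE_P == 1 to wire in only the freshly split blocks.  */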
/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken (stored in the
   global label_value_list); make_edges uses it to handle computed
   gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
          && head)
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      if (inside_basic_block_p (insn))
        {
          if (head == NULL_RTX)
            head = insn;
          end = insn;
        }

      if (head && control_flow_insn_p (insn))
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      switch (code)
        {
        case NOTE:
          {
            int kind = NOTE_LINE_NUMBER (insn);

            /* Look for basic block notes with which to keep the
               basic_block_info pointers stable.  Unthread the note now;
               we'll put it back at the right place in create_basic_block.
               Or not at all if we've already found a note in this block.  */
            if (kind == NOTE_INSN_BASIC_BLOCK)
              {
                if (bb_note == NULL_RTX)
                  bb_note = insn;
                else
                  next = delete_insn (insn);
              }
            break;
          }

        case CODE_LABEL:
        case JUMP_INSN:
        case INSN:
        case BARRIER:
          break;

        case CALL_INSN:
          if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              /* Scan each of the alternatives for label refs.  */
              lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
              lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
              lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
              /* Record its tail recursion label, if any.  */
              if (XEXP (PATTERN (insn), 3) != NULL_RTX)
                trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
            }
          break;

        default:
          abort ();
        }

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx note;

          /* Make a list of all labels referred to other than by jumps.

             Make a special exception for labels followed by an ADDR*VEC,
             as this would be a part of the tablejump setup code.

             Make a special exception for registers loaded with label
             values just before jump insns that use them.  */

          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_LABEL)
              {
                rtx lab = XEXP (note, 0), next;

                if ((next = next_nonnote_insn (lab)) != NULL
                    && GET_CODE (next) == JUMP_INSN
                    && (GET_CODE (PATTERN (next)) == ADDR_VEC
                        || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
                  ;
                else if (GET_CODE (lab) == NOTE)
                  ;
                else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
                         && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
                  ;
                else
                  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
              }
        }
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;

  clear_aux_for_blocks ();
}
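
/* Note that find_basic_blocks_1 communicates through the globals
   label_value_list and tail_recursion_label_list: the former feeds the
   computed-goto handling in make_edges, and the latter is simply
   recorded here for the tail-call machinery elsewhere in the
   compiler.  */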
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (rtx f, int nregs ATTRIBUTE_UNUSED,
                   FILE *file ATTRIBUTE_UNUSED)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
         tag for reuse during create_basic_block, just in case some pass
         copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
        bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
              EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}
/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
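
/* These states drive find_many_sub_basic_blocks below: callers mark the
   blocks they have dirtied as BLOCK_TO_SPLIT, untouched blocks stay
   BLOCK_ORIGINAL, and blocks created by split_block start out with
   bb->aux == 0, i.e. BLOCK_NEW, which is how freshly created blocks are
   recognized when counts and frequencies are recomputed.  */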
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            BB_END (bb) = flow_transfer_insn;

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR, bb, 0);
        }

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          BB_END (bb) = flow_transfer_insn;
          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     in a return and a barrier, or possibly some other sequence that does
     not behave like an ordinary jump, we need to take care to move the
     basic block boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup on the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}
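
/* A typical client is a pass that expands a single insn into a sequence
   containing an internal jump or a potentially trapping insn; the
   containing block then has a control flow insn in its middle, and the
   scan above splits it so that control flow once again occurs only at
   block ends.  */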
/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (!note)
        return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
                  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}
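
/* For instance, with REG_BR_PROB_BASE being 10000, a REG_BR_PROB note of
   3000 on the final jump gives the branch edge 30% of the block's count
   (rounded) and leaves the remaining 70% on the fallthru edge, as
   computed above.  */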
/* Assume that someone emitted code with control flow instructions into
   the basic blocks marked in BLOCKS.  Update the CFG data structures to
   match.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
               TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;
      if (STATE (bb) == BLOCK_NEW)
        {
          bb->count = 0;
          bb->frequency = 0;
          for (e = bb->pred; e; e = e->pred_next)
            {
              bb->count += e->count;
              bb->frequency += EDGE_FREQUENCY (e);
            }
        }

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}
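
/* A sketch of the expected calling sequence (illustrative only):

     sbitmap blocks = sbitmap_alloc (last_basic_block);
     sbitmap_zero (blocks);
     SET_BIT (blocks, bb->index);   (once per touched block)
     find_many_sub_basic_blocks (blocks);
     sbitmap_free (blocks);

   using the usual sbitmap.h primitives.  */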
/* Like above, but for a single basic block only.  */

void
find_sub_basic_blocks (basic_block bb)
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
        {
          b->count = 0;
          b->frequency = 0;
          for (e = b->pred; e; e = e->pred_next)
            {
              b->count += e->count;
              b->frequency += EDGE_FREQUENCY (e);
            }
        }

      compute_outgoing_frequencies (b);
    }
}