1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
51 /* Random guesstimation given names. */
52 #define PROB_NEVER (0)
53 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 10 - 1)
54 #define PROB_UNLIKELY (REG_BR_PROB_BASE * 4 / 10 - 1)
55 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
56 #define PROB_LIKELY (REG_BR_PROB_BASE - PROB_UNLIKELY)
57 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
58 #define PROB_ALWAYS (REG_BR_PROB_BASE)
60 static void combine_predictions_for_insn
PARAMS ((rtx
, basic_block
));
61 static void dump_prediction
PARAMS ((enum br_predictor
, int,
63 static void estimate_loops_at_level
PARAMS ((struct loop
*loop
));
64 static void propagate_freq
PARAMS ((basic_block
));
65 static void estimate_bb_frequencies
PARAMS ((struct loops
*));
66 static void counts_to_freqs
PARAMS ((void));
68 /* Information we hold about each branch predictor.
69 Filled using information from predict.def. */
72 const char *name
; /* Name used in the debugging dumps. */
73 int hitrate
; /* Expected hitrate used by
74 predict_insn_def call. */
78 /* Use given predictor without Dempster-Shafer theory if it matches
79 using first_match heuristics. */
80 #define PRED_FLAG_FIRST_MATCH 1
82 /* Recompute hitrate in percent to our representation. */
84 #define HITRATE(VAL) ((int)((VAL) * REG_BR_PROB_BASE + 50) / 100)
86 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
87 struct predictor_info predictor_info
[] = {
88 #include "predict.def"
90 /* Upper bound on predictors. */
96 predict_insn (insn
, predictor
, probability
)
99 enum br_predictor predictor
;
101 if (!any_condjump_p (insn
))
104 = gen_rtx_EXPR_LIST (REG_BR_PRED
,
105 gen_rtx_CONCAT (VOIDmode
,
106 GEN_INT ((int) predictor
),
107 GEN_INT ((int) probability
)),
111 /* Predict insn by given predictor. */
113 predict_insn_def (insn
, predictor
, taken
)
115 enum br_predictor predictor
;
116 enum prediction taken
;
118 int probability
= predictor_info
[(int) predictor
].hitrate
;
120 probability
= REG_BR_PROB_BASE
- probability
;
121 predict_insn (insn
, predictor
, probability
);
124 /* Predict edge E with given probability if possible. */
126 predict_edge (e
, predictor
, probability
)
129 enum br_predictor predictor
;
132 last_insn
= e
->src
->end
;
134 /* We can store the branch prediction information only about
135 conditional jumps. */
136 if (!any_condjump_p (last_insn
))
139 /* We always store probability of branching. */
140 if (e
->flags
& EDGE_FALLTHRU
)
141 probability
= REG_BR_PROB_BASE
- probability
;
143 predict_insn (last_insn
, predictor
, probability
);
146 /* Predict edge E by given predictor if possible. */
148 predict_edge_def (e
, predictor
, taken
)
150 enum br_predictor predictor
;
151 enum prediction taken
;
153 int probability
= predictor_info
[(int) predictor
].hitrate
;
156 probability
= REG_BR_PROB_BASE
- probability
;
157 predict_edge (e
, predictor
, probability
);
160 /* Invert all branch predictions or probability notes in the INSN. This needs
161 to be done each time we invert the condition used by the jump. */
163 invert_br_probabilities (insn
)
166 rtx note
= REG_NOTES (insn
);
170 if (REG_NOTE_KIND (note
) == REG_BR_PROB
)
171 XEXP (note
, 0) = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (note
, 0)));
172 else if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
173 XEXP (XEXP (note
, 0), 1)
174 = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (XEXP (note
, 0), 1)));
175 note
= XEXP (note
, 1);
179 /* Dump information about the branch prediction to the output file. */
181 dump_prediction (predictor
, probability
, bb
)
182 enum br_predictor predictor
;
191 while (e
->flags
& EDGE_FALLTHRU
)
194 fprintf (rtl_dump_file
, " %s heuristics: %.1f%%",
195 predictor_info
[predictor
].name
,
196 probability
* 100.0 / REG_BR_PROB_BASE
);
200 fprintf (rtl_dump_file
, " exec ");
201 fprintf (rtl_dump_file
, HOST_WIDEST_INT_PRINT_DEC
,
202 (HOST_WIDEST_INT
) bb
->count
);
203 fprintf (rtl_dump_file
, " hit ");
204 fprintf (rtl_dump_file
, HOST_WIDEST_INT_PRINT_DEC
,
205 (HOST_WIDEST_INT
) e
->count
);
206 fprintf (rtl_dump_file
, " (%.1f%%)",
207 e
->count
* 100.0 / bb
->count
);
209 fprintf (rtl_dump_file
, "\n");
212 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
213 note if not already present. Remove now useless REG_BR_PRED notes. */
215 combine_predictions_for_insn (insn
, bb
)
219 rtx prob_note
= find_reg_note (insn
, REG_BR_PROB
, 0);
220 rtx
*pnote
= ®_NOTES (insn
);
221 int best_probability
= PROB_EVEN
;
222 int best_predictor
= END_PREDICTORS
;
223 int combined_probability
= REG_BR_PROB_BASE
/ 2;
227 fprintf (rtl_dump_file
, "Predictions for insn %i bb %i\n", INSN_UID (insn
),
230 /* We implement "first match" heuristics and use probability guessed
231 by predictor with smallest index. In future we will use better
232 probability combination techniques. */
235 if (REG_NOTE_KIND (*pnote
) == REG_BR_PRED
)
237 int predictor
= INTVAL (XEXP (XEXP (*pnote
, 0), 0));
238 int probability
= INTVAL (XEXP (XEXP (*pnote
, 0), 1));
240 dump_prediction (predictor
, probability
, bb
);
241 if (best_predictor
> predictor
)
242 best_probability
= probability
, best_predictor
= predictor
;
243 *pnote
= XEXP (*pnote
, 1);
245 d
= (combined_probability
* probability
246 + (REG_BR_PROB_BASE
- combined_probability
)
247 * (REG_BR_PROB_BASE
- probability
));
248 /* Use FP math to avoid overflows of 32bit integers. */
249 combined_probability
= (((double)combined_probability
) * probability
250 * REG_BR_PROB_BASE
/ d
+ 0.5);
253 pnote
= &XEXP (*pnote
, 1);
255 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
256 combined_probability
= best_probability
;
257 dump_prediction (PRED_FIRST_MATCH
, best_probability
, bb
);
258 dump_prediction (PRED_COMBINED
, combined_probability
, bb
);
262 = gen_rtx_EXPR_LIST (REG_BR_PROB
,
263 GEN_INT (combined_probability
), REG_NOTES (insn
));
264 /* Save the prediction into CFG in case we are seeing non-degenerated
266 if (bb
->succ
->succ_next
)
268 BRANCH_EDGE (bb
)->probability
= combined_probability
;
269 FALLTHRU_EDGE (bb
)->probability
= REG_BR_PROB_BASE
- combined_probability
;
274 /* Statically estimate the probability that a branch will be taken.
275 ??? In the next revision there will be a number of other predictors added
276 from the above references. Further, each heuristic will be factored out
277 into its own function for clarity (and to facilitate the combination of
281 estimate_probability (loops_info
)
282 struct loops
*loops_info
;
284 sbitmap
*dominators
, *post_dominators
;
286 int found_noreturn
= 0;
288 dominators
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
289 post_dominators
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
290 calculate_dominance_info (NULL
, dominators
, 0);
291 calculate_dominance_info (NULL
, post_dominators
, 1);
293 /* Try to predict out blocks in a loop that are not part of a
295 for (i
= 0; i
< loops_info
->num
; i
++)
299 for (j
= loops_info
->array
[i
].first
->index
;
300 j
<= loops_info
->array
[i
].last
->index
;
303 if (TEST_BIT (loops_info
->array
[i
].nodes
, j
))
305 int header_found
= 0;
308 /* Loop branch heuristics - predict as taken an edge back to
310 for (e
= BASIC_BLOCK(j
)->succ
; e
; e
= e
->succ_next
)
311 if (e
->dest
== loops_info
->array
[i
].header
312 && e
->src
== loops_info
->array
[i
].latch
)
315 predict_edge_def (e
, PRED_LOOP_BRANCH
, TAKEN
);
317 /* Loop exit heuristics - predict as not taken an edge exiting
318 the loop if the conditional has no loop header successors */
320 for (e
= BASIC_BLOCK(j
)->succ
; e
; e
= e
->succ_next
)
321 if (e
->dest
->index
<= 0
322 || !TEST_BIT (loops_info
->array
[i
].nodes
, e
->dest
->index
))
323 predict_edge_def (e
, PRED_LOOP_EXIT
, NOT_TAKEN
);
328 /* Attempt to predict conditional jumps using a number of heuristics. */
329 for (i
= 0; i
< n_basic_blocks
; i
++)
331 basic_block bb
= BASIC_BLOCK (i
);
332 rtx last_insn
= bb
->end
;
336 /* If block has no successor, predict all possible paths to
337 it as improbable, as the block contains a call to a noreturn
338 function and thus can be executed only once. */
339 if (bb
->succ
== NULL
&& !found_noreturn
)
343 /* ??? Postdominator claims each noreturn block to be postdominated
344 by each, so we need to run only once. This needs to be changed
345 once postdominance algorithm is updated to say something more sane.
348 for (y
= 0; y
< n_basic_blocks
; y
++)
349 if (!TEST_BIT (post_dominators
[y
], i
))
351 for (e
= BASIC_BLOCK (y
)->succ
; e
; e
= e
->succ_next
)
352 if (e
->dest
->index
>= 0
353 && TEST_BIT (post_dominators
[e
->dest
->index
], i
))
354 predict_edge_def (e
, PRED_NORETURN
, NOT_TAKEN
);
358 if (GET_CODE (last_insn
) != JUMP_INSN
359 || ! any_condjump_p (last_insn
))
362 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
364 /* Predict edges to blocks that return immediately to be
365 improbable. These are usually used to signal error states. */
366 if (e
->dest
== EXIT_BLOCK_PTR
367 || (e
->dest
->succ
&& !e
->dest
->succ
->succ_next
368 && e
->dest
->succ
->dest
== EXIT_BLOCK_PTR
))
369 predict_edge_def (e
, PRED_ERROR_RETURN
, NOT_TAKEN
);
371 /* Look for block we are guarding (ie we dominate it,
372 but it doesn't postdominate us). */
373 if (e
->dest
!= EXIT_BLOCK_PTR
375 && TEST_BIT (dominators
[e
->dest
->index
], e
->src
->index
)
376 && !TEST_BIT (post_dominators
[e
->src
->index
], e
->dest
->index
))
379 /* The call heuristic claims that a guarded function call
380 is improbable. This is because such calls are often used
381 to signal exceptional situations such as printing error
383 for (insn
= e
->dest
->head
; insn
!= NEXT_INSN (e
->dest
->end
);
384 insn
= NEXT_INSN (insn
))
385 if (GET_CODE (insn
) == CALL_INSN
386 /* Constant and pure calls are hardly used to signal
387 something exceptional. */
388 && ! CONST_CALL_P (insn
))
390 predict_edge_def (e
, PRED_CALL
, NOT_TAKEN
);
396 cond
= get_condition (last_insn
, &earliest
);
400 /* Try "pointer heuristic."
401 A comparison ptr == 0 is predicted as false.
402 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
403 switch (GET_CODE (cond
))
406 if (GET_CODE (XEXP (cond
, 0)) == REG
407 && REG_POINTER (XEXP (cond
, 0))
408 && (XEXP (cond
, 1) == const0_rtx
409 || (GET_CODE (XEXP (cond
, 1)) == REG
410 && REG_POINTER (XEXP (cond
, 1)))))
412 predict_insn_def (last_insn
, PRED_POINTER
, NOT_TAKEN
);
415 if (GET_CODE (XEXP (cond
, 0)) == REG
416 && REG_POINTER (XEXP (cond
, 0))
417 && (XEXP (cond
, 1) == const0_rtx
418 || (GET_CODE (XEXP (cond
, 1)) == REG
419 && REG_POINTER (XEXP (cond
, 1)))))
420 predict_insn_def (last_insn
, PRED_POINTER
, TAKEN
);
427 /* Try "opcode heuristic."
428 EQ tests are usually false and NE tests are usually true. Also,
429 most quantities are positive, so we can make the appropriate guesses
430 about signed comparisons against zero. */
431 switch (GET_CODE (cond
))
434 /* Unconditional branch. */
435 predict_insn_def (last_insn
, PRED_UNCONDITIONAL
,
436 cond
== const0_rtx
? NOT_TAKEN
: TAKEN
);
441 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
445 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
448 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
451 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
455 if (XEXP (cond
, 1) == const0_rtx
456 || (GET_CODE (XEXP (cond
, 1)) == CONST_INT
457 && INTVAL (XEXP (cond
, 1)) == -1))
458 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
462 if (XEXP (cond
, 1) == const0_rtx
463 || (GET_CODE (XEXP (cond
, 1)) == CONST_INT
464 && INTVAL (XEXP (cond
, 1)) == -1))
465 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
473 /* Attach the combined probability to each conditional jump. */
474 for (i
= 0; i
< n_basic_blocks
; i
++)
476 rtx last_insn
= BLOCK_END (i
);
478 if (GET_CODE (last_insn
) != JUMP_INSN
479 || ! any_condjump_p (last_insn
))
481 combine_predictions_for_insn (last_insn
, BASIC_BLOCK (i
));
483 sbitmap_vector_free (post_dominators
);
484 sbitmap_vector_free (dominators
);
486 estimate_bb_frequencies (loops_info
);
489 /* __builtin_expect dropped tokens into the insn stream describing
490 expected values of registers. Generate branch probabilities
491 based off these values. */
494 expected_value_to_br_prob ()
496 rtx insn
, cond
, ev
= NULL_RTX
, ev_reg
= NULL_RTX
;
498 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
500 switch (GET_CODE (insn
))
503 /* Look for expected value notes. */
504 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EXPECTED_VALUE
)
506 ev
= NOTE_EXPECTED_VALUE (insn
);
507 ev_reg
= XEXP (ev
, 0);
512 /* Never propagate across labels. */
517 /* Look for insns that clobber the EV register. */
518 if (ev
&& reg_set_p (ev_reg
, insn
))
523 /* Look for simple conditional branches. If we haven't got an
524 expected value yet, no point going further. */
525 if (GET_CODE (insn
) != JUMP_INSN
|| ev
== NULL_RTX
)
527 if (! any_condjump_p (insn
))
532 /* Collect the branch condition, hopefully relative to EV_REG. */
533 /* ??? At present we'll miss things like
534 (expected_value (eq r70 0))
536 (set r80 (lt r70 r71))
537 (set pc (if_then_else (ne r80 0) ...))
538 as canonicalize_condition will render this to us as
540 Could use cselib to try and reduce this further. */
541 cond
= XEXP (SET_SRC (PATTERN (insn
)), 0);
542 cond
= canonicalize_condition (insn
, cond
, 0, NULL
, ev_reg
);
544 || XEXP (cond
, 0) != ev_reg
545 || GET_CODE (XEXP (cond
, 1)) != CONST_INT
)
548 /* Substitute and simplify. Given that the expression we're
549 building involves two constants, we should wind up with either
551 cond
= gen_rtx_fmt_ee (GET_CODE (cond
), VOIDmode
,
552 XEXP (ev
, 1), XEXP (cond
, 1));
553 cond
= simplify_rtx (cond
);
555 /* Turn the condition into a scaled branch probability. */
556 if (cond
!= const_true_rtx
&& cond
!= const0_rtx
)
558 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
,
559 cond
== const_true_rtx
? TAKEN
: NOT_TAKEN
);
563 /* This is used to carry information about basic blocks. It is
564 attached to the AUX field of the standard CFG block. */
566 typedef struct block_info_def
568 /* Estimated frequency of execution of basic_block. */
571 /* To keep queue of basic blocks to process. */
574 /* True if block already converted. */
577 /* Number of blocks processed before adding the basic block to the queue. Used
578 to recognize irregular regions. */
582 /* Similar information for edges. */
583 typedef struct edge_info_def
585 /* In case edge is a loopback edge, the probability edge will be reached
586 in case header is. Estimated number of iterations of the loop can be
587 then computed as 1 / (1 - back_edge_prob). */
588 double back_edge_prob
;
589 /* True if the edge is a loopback edge in the natural loop. */
593 #define BLOCK_INFO(B) ((block_info) (B)->aux)
594 #define EDGE_INFO(E) ((edge_info) (E)->aux)
596 /* Helper function for estimate_bb_frequencies.
597 Propagate the frequencies for loops headed by HEAD. */
599 propagate_freq (head
)
602 basic_block bb
= head
;
603 basic_block last
= bb
;
608 BLOCK_INFO (head
)->frequency
= 1;
609 for (; bb
; bb
= nextbb
)
611 double cyclic_probability
= 0, frequency
= 0;
613 nextbb
= BLOCK_INFO (bb
)->next
;
614 BLOCK_INFO (bb
)->next
= NULL
;
616 /* Compute frequency of basic block. */
619 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
620 if (!BLOCK_INFO (e
->src
)->visited
&& !EDGE_INFO (e
)->back_edge
)
623 /* We haven't yet processed all predecessors of edge e. These may
624 be waiting in the queue or we may hit an irreducible region.
626 To avoid infinite looping on irreducible regions, count the number
627 of blocks processed at the time the basic block was queued. In the
628 case the number hasn't changed, we've hit an irreducible region and we
629 forget the backward edge. This can increase time complexity
630 by the number of irreducible blocks, but in the same way the standard
631 loop does, so it should not result in noticeable slowdown.
633 Alternatively we may distinguish backward and cross edges in the
634 DFS tree by a preprocessing pass and ignore the existence of non-loop
636 if (e
&& BLOCK_INFO (bb
)->nvisited
!= nvisited
)
641 BLOCK_INFO (last
)->next
= e
->dest
;
642 BLOCK_INFO (last
)->nvisited
= nvisited
;
646 else if (e
&& rtl_dump_file
)
647 fprintf (rtl_dump_file
, "Irreducible region hit, ignoring edge to bb %i\n",
650 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
651 if (EDGE_INFO (e
)->back_edge
)
652 cyclic_probability
+= EDGE_INFO (e
)->back_edge_prob
;
653 else if (BLOCK_INFO (e
->src
)->visited
)
654 frequency
+= (e
->probability
655 * BLOCK_INFO (e
->src
)->frequency
/
658 if (cyclic_probability
> 1.0 - 1.0 / REG_BR_PROB_BASE
)
659 cyclic_probability
= 1.0 - 1.0 / REG_BR_PROB_BASE
;
661 BLOCK_INFO (bb
)->frequency
= frequency
/ (1 - cyclic_probability
);
664 BLOCK_INFO (bb
)->visited
= 1;
666 /* Compute back edge frequencies. */
667 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
669 EDGE_INFO (e
)->back_edge_prob
= (e
->probability
670 * BLOCK_INFO (bb
)->frequency
673 /* Propagate to successor blocks. */
674 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
675 if (!EDGE_INFO (e
)->back_edge
676 && !BLOCK_INFO (e
->dest
)->visited
677 && !BLOCK_INFO (e
->dest
)->next
&& e
->dest
!= last
)
682 BLOCK_INFO (last
)->next
= e
->dest
;
683 BLOCK_INFO (last
)->nvisited
= nvisited
;
690 /* Estimate probabilities of the loopback edges in loops at same nest level. */
692 estimate_loops_at_level (first_loop
)
693 struct loop
*first_loop
;
695 struct loop
*l
, *loop
= first_loop
;
697 for (loop
= first_loop
; loop
; loop
= loop
->next
)
702 estimate_loops_at_level (loop
->inner
);
704 /* find current loop back edge and mark it. */
705 for (e
= loop
->latch
->succ
; e
->dest
!= loop
->header
; e
= e
->succ_next
);
707 EDGE_INFO (e
)->back_edge
= 1;
709 /* In case loop header is shared, ensure that it is the last one sharing
710 same header, so we avoid redundant work. */
713 for (l
= loop
->next
; l
; l
= l
->next
)
714 if (l
->header
== loop
->header
)
720 /* Now merge all nodes of all loops with given header as not visited. */
721 for (l
= loop
->shared
? first_loop
: loop
; l
!= loop
->next
; l
= l
->next
)
722 if (loop
->header
== l
->header
)
723 EXECUTE_IF_SET_IN_SBITMAP (l
->nodes
, 0, n
,
724 BLOCK_INFO (BASIC_BLOCK (n
))->visited
=
726 propagate_freq (loop
->header
);
730 /* Convert counts measured by profile driven feedback to frequencies. */
734 HOST_WIDEST_INT count_max
= 1;
737 for (i
= 0; i
< n_basic_blocks
; i
++)
738 if (BASIC_BLOCK (i
)->count
> count_max
)
739 count_max
= BASIC_BLOCK (i
)->count
;
741 for (i
= -2; i
< n_basic_blocks
; i
++)
745 bb
= ENTRY_BLOCK_PTR
;
749 bb
= BASIC_BLOCK (i
);
750 bb
->frequency
= ((bb
->count
* BB_FREQ_MAX
+ count_max
/ 2)
755 /* Estimate basic blocks frequency by given branch probabilities. */
757 estimate_bb_frequencies (loops
)
766 if (flag_branch_probabilities
)
772 /* Fill in the probability values in flowgraph based on the REG_BR_PROB
774 for (i
= 0; i
< n_basic_blocks
; i
++)
776 rtx last_insn
= BLOCK_END (i
);
778 edge fallthru
, branch
;
780 if (GET_CODE (last_insn
) != JUMP_INSN
|| !any_condjump_p (last_insn
)
781 /* Avoid handling of conditional jumps jumping to fallthru edge. */
782 || BASIC_BLOCK (i
)->succ
->succ_next
== NULL
)
784 /* We can predict only conditional jumps at the moment.
785 Expect each edge to be equally probable.
786 ?? In future we want to make abnormal edges improbable. */
790 for (e
= BASIC_BLOCK (i
)->succ
; e
; e
= e
->succ_next
)
793 if (e
->probability
!= 0)
797 for (e
= BASIC_BLOCK (i
)->succ
; e
; e
= e
->succ_next
)
798 e
->probability
= (REG_BR_PROB_BASE
+ nedges
/ 2) / nedges
;
802 probability
= INTVAL (XEXP (find_reg_note (last_insn
,
803 REG_BR_PROB
, 0), 0));
804 fallthru
= BASIC_BLOCK (i
)->succ
;
805 if (!fallthru
->flags
& EDGE_FALLTHRU
)
806 fallthru
= fallthru
->succ_next
;
807 branch
= BASIC_BLOCK (i
)->succ
;
808 if (branch
->flags
& EDGE_FALLTHRU
)
809 branch
= branch
->succ_next
;
811 branch
->probability
= probability
;
812 fallthru
->probability
= REG_BR_PROB_BASE
- probability
;
815 ENTRY_BLOCK_PTR
->succ
->probability
= REG_BR_PROB_BASE
;
817 /* Set up block info for each basic block. */
818 bi
= (block_info
) xcalloc ((n_basic_blocks
+ 2), sizeof (*bi
));
819 ei
= (edge_info
) xcalloc ((n_edges
), sizeof (*ei
));
820 for (i
= -2; i
< n_basic_blocks
; i
++)
826 bb
= ENTRY_BLOCK_PTR
;
830 bb
= BASIC_BLOCK (i
);
831 bb
->aux
= bi
+ i
+ 2;
832 BLOCK_INFO (bb
)->visited
= 1;
833 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
835 e
->aux
= ei
+ edgenum
, edgenum
++;
836 EDGE_INFO (e
)->back_edge_prob
= ((double) e
->probability
840 /* First compute probabilities locally for each loop from innermost
841 to outermost to examine probabilities for back edges. */
842 estimate_loops_at_level (loops
->tree_root
);
844 /* Now fake loop around whole function to finalize probabilities. */
845 for (i
= 0; i
< n_basic_blocks
; i
++)
846 BLOCK_INFO (BASIC_BLOCK (i
))->visited
= 0;
847 BLOCK_INFO (ENTRY_BLOCK_PTR
)->visited
= 0;
848 BLOCK_INFO (EXIT_BLOCK_PTR
)->visited
= 0;
849 propagate_freq (ENTRY_BLOCK_PTR
);
851 for (i
= 0; i
< n_basic_blocks
; i
++)
852 if (BLOCK_INFO (BASIC_BLOCK (i
))->frequency
> freq_max
)
853 freq_max
= BLOCK_INFO (BASIC_BLOCK (i
))->frequency
;
854 for (i
= -2; i
< n_basic_blocks
; i
++)
858 bb
= ENTRY_BLOCK_PTR
;
862 bb
= BASIC_BLOCK (i
);
863 bb
->frequency
= (BLOCK_INFO (bb
)->frequency
* BB_FREQ_MAX
/ freq_max