1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
51 /* Random guesstimation given names. */
/* All PROB_* values are fixed-point fractions of REG_BR_PROB_BASE:
   PROB_NEVER == 0, PROB_ALWAYS == REG_BR_PROB_BASE, PROB_EVEN == 1/2.
   LIKELY/UNLIKELY pairs are symmetric around PROB_EVEN by construction. */
52 #define PROB_NEVER (0)
53 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 10 - 1)
54 #define PROB_UNLIKELY (REG_BR_PROB_BASE * 4 / 10 - 1)
55 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
56 #define PROB_LIKELY (REG_BR_PROB_BASE - PROB_UNLIKELY)
57 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
58 #define PROB_ALWAYS (REG_BR_PROB_BASE)
/* Forward declarations of file-local helpers, written with the old-style
   PARAMS macro for pre-ANSI compatibility.
   NOTE(review): the dump_prediction prototype below appears truncated in
   this copy — its trailing parameters and closing "));" are missing. */
60 static void combine_predictions_for_insn
PARAMS ((rtx
, basic_block
));
61 static void dump_prediction
PARAMS ((enum br_predictor
, int,
63 static void estimate_loops_at_level
PARAMS ((struct loop
*loop
));
64 static void propagate_freq
PARAMS ((basic_block
));
65 static void estimate_bb_frequencies
PARAMS ((struct loops
*));
66 static void counts_to_freqs
PARAMS ((void));
68 /* Information we hold about each branch predictor.
69 Filled using information from predict.def. */
/* NOTE(review): the "struct predictor_info {" opener and the "flags"
   member appear to be missing from this copy of the file; the table
   below is indexed by enum br_predictor and its .flags member is read
   in combine_predictions_for_insn. */
72 const char *const name
; /* Name used in the debugging dumps. */
73 const int hitrate
; /* Expected hitrate used by
74 predict_insn_def call. */
78 /* Use given predictor without Dempster-Shaffer theory if it matches
79 using first_match heuristics. */
80 #define PRED_FLAG_FIRST_MATCH 1
82 /* Recompute hitrate in percent to our representation. */
/* Converts a percentage VAL to a rounded fraction of REG_BR_PROB_BASE. */
84 #define HITRATE(VAL) ((int)((VAL) * REG_BR_PROB_BASE + 50) / 100)
/* Expand each DEF_PREDICTOR entry in predict.def into one table row. */
86 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
87 static const struct predictor_info predictor_info
[] = {
88 #include "predict.def"
90 /* Upper bound on predictors. */
/* Attach a REG_BR_PRED note (a CONCAT of PREDICTOR and PROBABILITY) to
   INSN's REG_NOTES.  Only conditional jumps can carry a prediction, hence
   the any_condjump_p guard.
   NOTE(review): several original lines (parameter declarations, braces,
   the early-exit body, and the "REG_NOTES (insn)" lvalue that the
   gen_rtx_EXPR_LIST result is assigned to) are missing from this copy. */
96 predict_insn (insn
, predictor
, probability
)
99 enum br_predictor predictor
;
101 if (!any_condjump_p (insn
))
104 = gen_rtx_EXPR_LIST (REG_BR_PRED
,
105 gen_rtx_CONCAT (VOIDmode
,
106 GEN_INT ((int) predictor
),
107 GEN_INT ((int) probability
)),
111 /* Predict insn by given predictor. */
/* Convenience wrapper around predict_insn: looks up the predictor's
   expected hitrate in predictor_info and hands it on.  The inversion
   below (REG_BR_PROB_BASE - probability) converts the hitrate into a
   not-taken probability; presumably it is guarded by a missing
   "if (taken != TAKEN)" line — TODO confirm against the full source. */
113 predict_insn_def (insn
, predictor
, taken
)
115 enum br_predictor predictor
;
116 enum prediction taken
;
118 int probability
= predictor_info
[(int) predictor
].hitrate
;
120 probability
= REG_BR_PROB_BASE
- probability
;
121 predict_insn (insn
, predictor
, probability
);
124 /* Predict edge E with given probability if possible. */
/* Stores the prediction on the conditional jump ending E's source block.
   Since predictions are recorded as probability-of-branching, the value
   is inverted when E itself is the fallthru edge. */
126 predict_edge (e
, predictor
, probability
)
129 enum br_predictor predictor
;
132 last_insn
= e
->src
->end
;
134 /* We can store the branch prediction information only about
135 conditional jumps. */
136 if (!any_condjump_p (last_insn
))
139 /* We always store probability of branching. */
140 if (e
->flags
& EDGE_FALLTHRU
)
141 probability
= REG_BR_PROB_BASE
- probability
;
143 predict_insn (last_insn
, predictor
, probability
);
146 /* Predict edge E by given predictor if possible. */
/* Edge-level analogue of predict_insn_def: translate the predictor's
   hitrate into a probability and delegate to predict_edge.  As with
   predict_insn_def, the inversion below is presumably conditional on a
   missing "if (taken != TAKEN)" line — TODO confirm. */
148 predict_edge_def (e
, predictor
, taken
)
150 enum br_predictor predictor
;
151 enum prediction taken
;
153 int probability
= predictor_info
[(int) predictor
].hitrate
;
156 probability
= REG_BR_PROB_BASE
- probability
;
157 predict_edge (e
, predictor
, probability
);
160 /* Invert all branch predictions or probability notes in the INSN. This needs
161 to be done each time we invert the condition used by the jump. */
/* Walks INSN's note list: a REG_BR_PROB note stores the probability
   directly in XEXP(note, 0); a REG_BR_PRED note stores it as the second
   operand of a CONCAT, i.e. XEXP (XEXP (note, 0), 1).  Each is replaced
   by REG_BR_PROB_BASE minus its old value. */
163 invert_br_probabilities (insn
)
166 rtx note
= REG_NOTES (insn
);
170 if (REG_NOTE_KIND (note
) == REG_BR_PROB
)
171 XEXP (note
, 0) = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (note
, 0)));
172 else if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
173 XEXP (XEXP (note
, 0), 1)
174 = GEN_INT (REG_BR_PROB_BASE
- INTVAL (XEXP (XEXP (note
, 0), 1)));
175 note
= XEXP (note
, 1);
179 /* Dump information about the branch prediction to the output file. */
/* Prints one line to rtl_dump_file: the predictor's name, whether the
   prediction was used or ignored, the predicted probability in percent,
   and — when profile counts are present — BB's execution count, the
   taken-edge count, and the measured hit rate.  The while loop below
   skips past fallthru edges so E ends up as the branch edge. */
181 dump_prediction (predictor
, probability
, bb
, used
)
182 enum br_predictor predictor
;
192 while (e
->flags
& EDGE_FALLTHRU
)
195 fprintf (rtl_dump_file
, " %s heuristics%s: %.1f%%",
196 predictor_info
[predictor
].name
,
197 used
? "" : " (ignored)",
198 probability
* 100.0 / REG_BR_PROB_BASE
);
/* The count/hit output below is presumably guarded by a missing
   "if (bb->count)" line — otherwise the final division by bb->count
   could divide by zero.  TODO confirm against the full source. */
202 fprintf (rtl_dump_file
, " exec ");
203 fprintf (rtl_dump_file
, HOST_WIDEST_INT_PRINT_DEC
,
204 (HOST_WIDEST_INT
) bb
->count
);
205 fprintf (rtl_dump_file
, " hit ");
206 fprintf (rtl_dump_file
, HOST_WIDEST_INT_PRINT_DEC
,
207 (HOST_WIDEST_INT
) e
->count
);
208 fprintf (rtl_dump_file
, " (%.1f%%)",
209 e
->count
* 100.0 / bb
->count
);
211 fprintf (rtl_dump_file
, "\n");
214 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
215 note if not already present. Remove now useless REG_BR_PRED notes. */
217 combine_predictions_for_insn (insn
, bb
)
221 rtx prob_note
= find_reg_note (insn
, REG_BR_PROB
, 0);
/* NOTE(review): "®_NOTES" below is mojibake — '®' is the HTML entity
   &reg; mis-decoded, so this was almost certainly "&REG_NOTES (insn)".
   pnote is used further down to walk and unlink REG_BR_PRED notes. */
222 rtx
*pnote
= ®_NOTES (insn
);
223 rtx note
= REG_NOTES (insn
);
224 int best_probability
= PROB_EVEN
;
225 int best_predictor
= END_PREDICTORS
;
226 int combined_probability
= REG_BR_PROB_BASE
/ 2;
228 bool first_match
= false;
232 fprintf (rtl_dump_file
, "Predictions for insn %i bb %i\n", INSN_UID (insn
),
235 /* We implement "first match" heuristics and use probability guessed
236 by predictor with smallest index. In the future we will use better
237 probability combination techniques. */
/* First pass over the notes: remember the prediction of the
   smallest-numbered (highest priority) predictor, and fold every
   prediction into combined_probability with the Dempster-Shaffer
   style formula below. */
240 if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
242 int predictor
= INTVAL (XEXP (XEXP (note
, 0), 0));
243 int probability
= INTVAL (XEXP (XEXP (note
, 0), 1));
246 if (best_predictor
> predictor
)
247 best_probability
= probability
, best_predictor
= predictor
;
249 d
= (combined_probability
* probability
250 + (REG_BR_PROB_BASE
- combined_probability
)
251 * (REG_BR_PROB_BASE
- probability
));
252 /* An FP math to avoid overflows of 32bit integers. */
253 combined_probability
= (((double)combined_probability
) * probability
254 * REG_BR_PROB_BASE
/ d
+ 0.5);
256 note
= XEXP (note
, 1);
259 /* Decide heuristic to use. In case we didn't match anything, use
260 no_prediction heuristic, in case we did match, use either
261 first match or Dempster-Shaffer theory depending on the flags. */
263 if (predictor_info
[best_predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
267 dump_prediction (PRED_NO_PREDICTION
, combined_probability
, bb
, true);
270 dump_prediction (PRED_DS_THEORY
, combined_probability
, bb
,
272 dump_prediction (PRED_FIRST_MATCH
, best_probability
, bb
, first_match
);
/* With first_match in effect the winning predictor's own value
   overrides the combined one. */
276 combined_probability
= best_probability
;
277 dump_prediction (PRED_COMBINED
, combined_probability
, bb
, true);
/* Second pass: dump each individual REG_BR_PRED note and unlink it
   from the note list; the notes are no longer needed. */
281 if (REG_NOTE_KIND (*pnote
) == REG_BR_PRED
)
283 int predictor
= INTVAL (XEXP (XEXP (*pnote
, 0), 0));
284 int probability
= INTVAL (XEXP (XEXP (*pnote
, 0), 1));
286 dump_prediction (predictor
, probability
, bb
,
287 !first_match
|| best_predictor
== predictor
);
288 *pnote
= XEXP (*pnote
, 1);
291 pnote
= &XEXP (*pnote
, 1);
/* Attach the final REG_BR_PROB note (presumably only when prob_note
   was absent — the guarding "if" line is missing from this copy). */
296 = gen_rtx_EXPR_LIST (REG_BR_PROB
,
297 GEN_INT (combined_probability
), REG_NOTES (insn
));
298 /* Save the prediction into CFG in case we are seeing non-degenerated
300 if (bb
->succ
->succ_next
)
302 BRANCH_EDGE (bb
)->probability
= combined_probability
;
303 FALLTHRU_EDGE (bb
)->probability
= REG_BR_PROB_BASE
- combined_probability
;
308 /* Statically estimate the probability that a branch will be taken.
309 ??? In the next revision there will be a number of other predictors added
310 from the above references. Further, each heuristic will be factored out
311 into its own function for clarity (and to facilitate the combination of
315 estimate_probability (loops_info
)
316 struct loops
*loops_info
;
318 sbitmap
*dominators
, *post_dominators
;
320 int found_noreturn
= 0;
/* Dominance and post-dominance bitmaps drive the guard/noreturn
   heuristics below; both are freed at the end of the function. */
322 dominators
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
323 post_dominators
= sbitmap_vector_alloc (n_basic_blocks
, n_basic_blocks
);
324 calculate_dominance_info (NULL
, dominators
, CDI_DOMINATORS
);
325 calculate_dominance_info (NULL
, post_dominators
, CDI_POST_DOMINATORS
);
327 /* Try to predict out blocks in a loop that are not part of a
329 for (i
= 0; i
< loops_info
->num
; i
++)
333 for (j
= loops_info
->array
[i
].first
->index
;
334 j
<= loops_info
->array
[i
].last
->index
;
337 if (TEST_BIT (loops_info
->array
[i
].nodes
, j
))
339 int header_found
= 0;
342 /* Loop branch heuristics - predict as taken an edge back to
344 for (e
= BASIC_BLOCK(j
)->succ
; e
; e
= e
->succ_next
)
345 if (e
->dest
== loops_info
->array
[i
].header
346 && e
->src
== loops_info
->array
[i
].latch
)
349 predict_edge_def (e
, PRED_LOOP_BRANCH
, TAKEN
);
351 /* Loop exit heuristics - predict as not taken an edge
352 exiting the loop if the conditinal has no loop header
/* NOTE(review): the "<= 0" test below looks suspicious — a negative
   index denotes the EXIT block, but index 0 is an ordinary basic
   block, which this would wrongly treat as a loop exit.  Presumably
   this should be "< 0"; confirm against the original source. */
355 for (e
= BASIC_BLOCK(j
)->succ
; e
; e
= e
->succ_next
)
356 if (e
->dest
->index
<= 0
357 || !TEST_BIT (loops_info
->array
[i
].nodes
, e
->dest
->index
))
358 predict_edge_def (e
, PRED_LOOP_EXIT
, NOT_TAKEN
);
363 /* Attempt to predict conditional jumps using a number of heuristics. */
364 for (i
= 0; i
< n_basic_blocks
; i
++)
366 basic_block bb
= BASIC_BLOCK (i
);
367 rtx last_insn
= bb
->end
;
371 /* If block has no successor, predict all possible paths to
372 it as improbable, as the block contains a call to a noreturn
373 function and thus can be executed only once. */
374 if (bb
->succ
== NULL
&& !found_noreturn
)
378 /* ??? Postdominator claims each noreturn block to be postdominated
379 by each, so we need to run only once. This needs to be changed
380 once postdominace algorithm is updated to say something more sane.
383 for (y
= 0; y
< n_basic_blocks
; y
++)
384 if (!TEST_BIT (post_dominators
[y
], i
))
386 for (e
= BASIC_BLOCK (y
)->succ
; e
; e
= e
->succ_next
)
387 if (e
->dest
->index
>= 0
388 && TEST_BIT (post_dominators
[e
->dest
->index
], i
))
389 predict_edge_def (e
, PRED_NORETURN
, NOT_TAKEN
);
/* Remaining heuristics apply only to conditional jumps. */
393 if (GET_CODE (last_insn
) != JUMP_INSN
394 || ! any_condjump_p (last_insn
))
397 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
399 /* Predict edges to blocks that return immediately to be
400 improbable. These are usually used to signal error states. */
401 if (e
->dest
== EXIT_BLOCK_PTR
402 || (e
->dest
->succ
&& !e
->dest
->succ
->succ_next
403 && e
->dest
->succ
->dest
== EXIT_BLOCK_PTR
))
404 predict_edge_def (e
, PRED_ERROR_RETURN
, NOT_TAKEN
);
406 /* Look for block we are guarding (ie we dominate it,
407 but it doesn't postdominate us). */
408 if (e
->dest
!= EXIT_BLOCK_PTR
410 && TEST_BIT (dominators
[e
->dest
->index
], e
->src
->index
)
411 && !TEST_BIT (post_dominators
[e
->src
->index
], e
->dest
->index
))
414 /* The call heuristic claims that a guarded function call
415 is improbable. This is because such calls are often used
416 to signal exceptional situations such as printing error
418 for (insn
= e
->dest
->head
; insn
!= NEXT_INSN (e
->dest
->end
);
419 insn
= NEXT_INSN (insn
))
420 if (GET_CODE (insn
) == CALL_INSN
421 /* Constant and pure calls are hardly used to signalize
422 something exceptional. */
423 && ! CONST_OR_PURE_CALL_P (insn
))
425 predict_edge_def (e
, PRED_CALL
, NOT_TAKEN
);
431 cond
= get_condition (last_insn
, &earliest
);
435 /* Try "pointer heuristic."
436 A comparison ptr == 0 is predicted as false.
437 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
/* The two arms below are presumably the EQ and NE cases of this
   switch; the case labels are missing from this copy. */
438 switch (GET_CODE (cond
))
441 if (GET_CODE (XEXP (cond
, 0)) == REG
442 && REG_POINTER (XEXP (cond
, 0))
443 && (XEXP (cond
, 1) == const0_rtx
444 || (GET_CODE (XEXP (cond
, 1)) == REG
445 && REG_POINTER (XEXP (cond
, 1)))))
447 predict_insn_def (last_insn
, PRED_POINTER
, NOT_TAKEN
);
450 if (GET_CODE (XEXP (cond
, 0)) == REG
451 && REG_POINTER (XEXP (cond
, 0))
452 && (XEXP (cond
, 1) == const0_rtx
453 || (GET_CODE (XEXP (cond
, 1)) == REG
454 && REG_POINTER (XEXP (cond
, 1)))))
455 predict_insn_def (last_insn
, PRED_POINTER
, TAKEN
);
462 /* Try "opcode heuristic."
463 EQ tests are usually false and NE tests are usually true. Also,
464 most quantities are positive, so we can make the appropriate guesses
465 about signed comparisons against zero. */
466 switch (GET_CODE (cond
))
469 /* Unconditional branch. */
470 predict_insn_def (last_insn
, PRED_UNCONDITIONAL
,
471 cond
== const0_rtx
? NOT_TAKEN
: TAKEN
);
476 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
480 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
483 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
486 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
/* Signed comparisons against zero / -1 (LT/LE and GE/GT arms;
   the case labels are missing from this copy). */
490 if (XEXP (cond
, 1) == const0_rtx
491 || (GET_CODE (XEXP (cond
, 1)) == CONST_INT
492 && INTVAL (XEXP (cond
, 1)) == -1))
493 predict_insn_def (last_insn
, PRED_OPCODE
, NOT_TAKEN
);
497 if (XEXP (cond
, 1) == const0_rtx
498 || (GET_CODE (XEXP (cond
, 1)) == CONST_INT
499 && INTVAL (XEXP (cond
, 1)) == -1))
500 predict_insn_def (last_insn
, PRED_OPCODE
, TAKEN
);
508 /* Attach the combined probability to each conditional jump. */
509 for (i
= 0; i
< n_basic_blocks
; i
++)
511 rtx last_insn
= BLOCK_END (i
);
513 if (GET_CODE (last_insn
) != JUMP_INSN
514 || ! any_condjump_p (last_insn
))
516 combine_predictions_for_insn (last_insn
, BASIC_BLOCK (i
));
518 sbitmap_vector_free (post_dominators
);
519 sbitmap_vector_free (dominators
);
521 estimate_bb_frequencies (loops_info
);
524 /* __builtin_expect dropped tokens into the insn stream describing
525 expected values of registers. Generate branch probabilities
526 based off these values. */
529 expected_value_to_br_prob ()
531 rtx insn
, cond
, ev
= NULL_RTX
, ev_reg
= NULL_RTX
;
/* Single forward walk over the insn stream, tracking the most recent
   NOTE_INSN_EXPECTED_VALUE (ev / ev_reg) until it is invalidated by a
   label or a write to the register. */
533 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
535 switch (GET_CODE (insn
))
538 /* Look for expected value notes. */
539 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EXPECTED_VALUE
)
541 ev
= NOTE_EXPECTED_VALUE (insn
);
542 ev_reg
= XEXP (ev
, 0);
548 /* Never propagate across labels. */
553 /* Look for insns that clobber the EV register. */
554 if (ev
&& reg_set_p (ev_reg
, insn
))
559 /* Look for simple conditional branches. If we haven't got an
560 expected value yet, no point going further. */
561 if (GET_CODE (insn
) != JUMP_INSN
|| ev
== NULL_RTX
)
563 if (! any_condjump_p (insn
))
568 /* Collect the branch condition, hopefully relative to EV_REG. */
569 /* ??? At present we'll miss things like
570 (expected_value (eq r70 0))
572 (set r80 (lt r70 r71))
573 (set pc (if_then_else (ne r80 0) ...))
574 as canonicalize_condition will render this to us as
576 Could use cselib to try and reduce this further. */
577 cond
= XEXP (SET_SRC (pc_set (insn
)), 0);
578 cond
= canonicalize_condition (insn
, cond
, 0, NULL
, ev_reg
);
/* Reject conditions not of the form (op ev_reg const_int); the
   leading "if (! cond" part of this test is missing from this copy. */
580 || XEXP (cond
, 0) != ev_reg
581 || GET_CODE (XEXP (cond
, 1)) != CONST_INT
)
584 /* Substitute and simplify. Given that the expression we're
585 building involves two constants, we should wind up with either
587 cond
= gen_rtx_fmt_ee (GET_CODE (cond
), VOIDmode
,
588 XEXP (ev
, 1), XEXP (cond
, 1));
589 cond
= simplify_rtx (cond
);
591 /* Turn the condition into a scaled branch probability. */
/* This test is a sanity check that simplification yielded a constant;
   its body (presumably an abort) is missing from this copy. */
592 if (cond
!= const_true_rtx
&& cond
!= const0_rtx
)
594 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
,
595 cond
== const_true_rtx
? TAKEN
: NOT_TAKEN
);
599 /* This is used to carry information about basic blocks. It is
600 attached to the AUX field of the standard CFG block. */
602 typedef struct block_info_def
604 /* Estimated frequency of execution of basic_block. */
605 volatile double frequency
;
607 /* To keep queue of basic blocks to process. */
610 /* True if block needs to be visited in prop_freqency. */
613 /* Number of predecessors we need to visit first. */
617 /* Similar information for edges. */
618 typedef struct edge_info_def
620 /* In case edge is an loopback edge, the probability edge will be reached
621 in case header is. Estimated number of iterations of the loop can be
622 then computed as 1 / (1 - back_edge_prob).
624 Volatile is needed to avoid differences in the optimized and unoptimized
625 builds on machines where FP registers are wider than double. */
626 volatile double back_edge_prob
;
627 /* True if the edge is an loopback edge in the natural loop. */
/* Accessors for the per-block / per-edge aux payloads set up by
   estimate_bb_frequencies via alloc_aux_for_blocks/edges. */
631 #define BLOCK_INFO(B) ((block_info) (B)->aux)
632 #define EDGE_INFO(E) ((edge_info) (E)->aux)
634 /* Helper function for estimate_bb_frequencies.
635 Propagate the frequencies for loops headed by HEAD. */
637 propagate_freq (head
)
640 basic_block bb
= head
;
641 basic_block last
= bb
;
646 /* For each basic block we need to visit count number of his predecessors
647 we need to visit first. */
648 for (n
= 0; n
< n_basic_blocks
; n
++)
650 basic_block bb
= BASIC_BLOCK (n
);
651 if (BLOCK_INFO (bb
)->tovisit
)
/* Count forward (non-DFS-back) predecessors still awaiting a visit;
   an unvisitable predecessor that is not a marked back edge indicates
   an irreducible region, which is merely reported and ignored. */
654 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
655 if (BLOCK_INFO (e
->src
)->tovisit
&& !(e
->flags
& EDGE_DFS_BACK
))
657 else if (BLOCK_INFO (e
->src
)->tovisit
658 && rtl_dump_file
&& !EDGE_INFO (e
)->back_edge
)
659 fprintf (rtl_dump_file
,
660 "Irreducible region hit, ignoring edge to %i->%i\n",
661 e
->src
->index
, bb
->index
);
662 BLOCK_INFO (bb
)->npredecessors
= count
;
/* Worklist propagation starting at HEAD (frequency fixed at 1). */
666 BLOCK_INFO (head
)->frequency
= 1;
667 for (; bb
; bb
= nextbb
)
669 volatile double cyclic_probability
= 0, frequency
= 0;
671 nextbb
= BLOCK_INFO (bb
)->next
;
672 BLOCK_INFO (bb
)->next
= NULL
;
674 /* Compute frequency of basic block. */
677 #ifdef ENABLE_CHECKING
678 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
679 if (BLOCK_INFO (e
->src
)->tovisit
&& !(e
->flags
& EDGE_DFS_BACK
))
/* Sum incoming frequency over forward edges; accumulate loopback
   probability separately so the cycle can be closed below. */
683 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
684 if (EDGE_INFO (e
)->back_edge
)
685 cyclic_probability
+= EDGE_INFO (e
)->back_edge_prob
;
686 else if (!(e
->flags
& EDGE_DFS_BACK
))
687 frequency
+= (e
->probability
688 * BLOCK_INFO (e
->src
)->frequency
/
/* Clamp so the 1/(1 - cyclic_probability) expansion below never
   divides by zero or goes negative. */
691 if (cyclic_probability
> 1.0 - 1.0 / REG_BR_PROB_BASE
)
692 cyclic_probability
= 1.0 - 1.0 / REG_BR_PROB_BASE
;
694 BLOCK_INFO (bb
)->frequency
= frequency
/ (1 - cyclic_probability
);
697 BLOCK_INFO (bb
)->tovisit
= 0;
699 /* Compute back edge frequencies. */
700 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
702 EDGE_INFO (e
)->back_edge_prob
= (e
->probability
703 * BLOCK_INFO (bb
)->frequency
706 /* Propagate to successor blocks. */
/* A successor whose last forward predecessor has now been visited is
   appended to the worklist via the 'last' tail pointer. */
707 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
708 if (!(e
->flags
& EDGE_DFS_BACK
)
709 && BLOCK_INFO (e
->dest
)->npredecessors
)
711 BLOCK_INFO (e
->dest
)->npredecessors
--;
712 if (!BLOCK_INFO (e
->dest
)->npredecessors
)
717 BLOCK_INFO (last
)->next
= e
->dest
;
724 /* Estimate probabilities of loopback edges in loops at same nest level. */
726 estimate_loops_at_level (first_loop
)
727 struct loop
*first_loop
;
729 struct loop
*l
, *loop
= first_loop
;
731 for (loop
= first_loop
; loop
; loop
= loop
->next
)
/* Recurse into nested loops first so inner cycles are resolved before
   the enclosing loop's frequencies are propagated. */
736 estimate_loops_at_level (loop
->inner
);
738 /* Find current loop back edge and mark it. */
/* Note the empty-bodied for: it only advances E to the latch->header
   edge.  It assumes such an edge exists (natural loop invariant). */
739 for (e
= loop
->latch
->succ
; e
->dest
!= loop
->header
; e
= e
->succ_next
);
741 EDGE_INFO (e
)->back_edge
= 1;
743 /* In case the loop header is shared, ensure that it is the last
744 one sharing the same header, so we avoid redundant work. */
747 for (l
= loop
->next
; l
; l
= l
->next
)
748 if (l
->header
== loop
->header
)
754 /* Now merge all nodes of all loops with given header as not visited. */
755 for (l
= loop
->shared
? first_loop
: loop
; l
!= loop
->next
; l
= l
->next
)
756 if (loop
->header
== l
->header
)
757 EXECUTE_IF_SET_IN_SBITMAP (l
->nodes
, 0, n
,
758 BLOCK_INFO (BASIC_BLOCK (n
))->tovisit
= 1
760 propagate_freq (loop
->header
);
764 /* Convert counts measured by profile driven feedback to frequencies. */
/* Scales every block's profile count into the [0, BB_FREQ_MAX] frequency
   range, rounding to nearest via the +count_max/2 term.  count_max
   starts at 1 so an all-zero profile cannot cause division by zero.
   The i = -2 loop start covers ENTRY (and presumably EXIT) in addition
   to the ordinary blocks; the EXIT arm is missing from this copy. */
768 HOST_WIDEST_INT count_max
= 1;
771 for (i
= 0; i
< n_basic_blocks
; i
++)
772 if (BASIC_BLOCK (i
)->count
> count_max
)
773 count_max
= BASIC_BLOCK (i
)->count
;
775 for (i
= -2; i
< n_basic_blocks
; i
++)
779 bb
= ENTRY_BLOCK_PTR
;
783 bb
= BASIC_BLOCK (i
);
784 bb
->frequency
= ((bb
->count
* BB_FREQ_MAX
+ count_max
/ 2)
789 /* Return true if function is likely to be expensive, so there is no point
790 to optimizer performance of prologue, epilogue or do inlining at the
791 expense of code size growth. THRESHOLD is the limit of number
792 of isntructions function can execute at average to be still considered
795 expensive_function_p (threshold
)
798 unsigned int sum
= 0;
802 /* We can not compute accurately for large thresholds due to scaled
804 if (threshold
> BB_FREQ_MAX
)
807 /* Frequencies are out of range. This either means that function contains
808 internal loop executing more than BB_FREQ_MAX times or profile feedback
809 is available and function has not been executed at all. */
810 if (ENTRY_BLOCK_PTR
->frequency
== 0)
813 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
814 limit
= ENTRY_BLOCK_PTR
->frequency
* threshold
;
/* Accumulate frequency-weighted active insn count; presumably the
   function returns true once sum exceeds limit (the comparison and
   return lines are missing from this copy). */
815 for (i
= 0; i
< n_basic_blocks
; i
++)
817 basic_block bb
= BASIC_BLOCK (i
);
820 for (insn
= bb
->head
; insn
!= NEXT_INSN (bb
->end
);
821 insn
= NEXT_INSN (insn
))
823 if (active_insn_p (insn
))
825 sum
+= bb
->frequency
;
834 /* Estimate basic blocks frequency by given branch probabilities. */
836 estimate_bb_frequencies (loops
)
842 mark_dfs_back_edges ();
843 if (flag_branch_probabilities
)
849 /* Fill in the probability values in flowgraph based on the REG_BR_PROB
851 for (i
= 0; i
< n_basic_blocks
; i
++)
853 rtx last_insn
= BLOCK_END (i
);
855 edge fallthru
, branch
;
857 if (GET_CODE (last_insn
) != JUMP_INSN
|| !any_condjump_p (last_insn
)
858 /* Avoid handling of conditional jumps jumping to fallthru edge. */
859 || BASIC_BLOCK (i
)->succ
->succ_next
== NULL
)
861 /* We can predict only conditional jumps at the moment.
862 Expect each edge to be equally probable.
863 ?? In the future we want to make abnormal edges improbable. */
867 for (e
= BASIC_BLOCK (i
)->succ
; e
; e
= e
->succ_next
)
870 if (e
->probability
!= 0)
874 for (e
= BASIC_BLOCK (i
)->succ
; e
; e
= e
->succ_next
)
875 e
->probability
= (REG_BR_PROB_BASE
+ nedges
/ 2) / nedges
;
879 probability
= INTVAL (XEXP (find_reg_note (last_insn
,
880 REG_BR_PROB
, 0), 0));
881 fallthru
= BASIC_BLOCK (i
)->succ
;
/* NOTE(review): BUG — "!fallthru->flags & EDGE_FALLTHRU" parses as
   "(!fallthru->flags) & EDGE_FALLTHRU" because unary ! binds tighter
   than bitwise &.  For any nonzero flags this is always 0, so fallthru
   is never advanced to the real fallthru edge.  Compare the correctly
   parenthesized branch test just below; this should read
   "if (!(fallthru->flags & EDGE_FALLTHRU))". */
882 if (!fallthru
->flags
& EDGE_FALLTHRU
)
883 fallthru
= fallthru
->succ_next
;
884 branch
= BASIC_BLOCK (i
)->succ
;
885 if (branch
->flags
& EDGE_FALLTHRU
)
886 branch
= branch
->succ_next
;
888 branch
->probability
= probability
;
889 fallthru
->probability
= REG_BR_PROB_BASE
- probability
;
892 ENTRY_BLOCK_PTR
->succ
->probability
= REG_BR_PROB_BASE
;
894 /* Set up block info for each basic block. */
895 alloc_aux_for_blocks (sizeof (struct block_info_def
));
896 alloc_aux_for_edges (sizeof (struct edge_info_def
));
/* i = -2 start covers ENTRY (and presumably EXIT; that arm is missing
   from this copy) in addition to ordinary blocks. */
897 for (i
= -2; i
< n_basic_blocks
; i
++)
903 bb
= ENTRY_BLOCK_PTR
;
907 bb
= BASIC_BLOCK (i
);
908 BLOCK_INFO (bb
)->tovisit
= 0;
909 for (e
= bb
->succ
; e
; e
= e
->succ_next
)
910 EDGE_INFO (e
)->back_edge_prob
= ((double) e
->probability
913 /* First compute probabilities locally for each loop from innermost
914 to outermost to examine probabilities for back edges. */
915 estimate_loops_at_level (loops
->tree_root
);
917 /* Now fake loop around whole function to finalize probabilities. */
918 for (i
= 0; i
< n_basic_blocks
; i
++)
919 BLOCK_INFO (BASIC_BLOCK (i
))->tovisit
= 1;
920 BLOCK_INFO (ENTRY_BLOCK_PTR
)->tovisit
= 1;
921 BLOCK_INFO (EXIT_BLOCK_PTR
)->tovisit
= 1;
922 propagate_freq (ENTRY_BLOCK_PTR
);
/* Rescale the propagated relative frequencies into [0, BB_FREQ_MAX]. */
924 for (i
= 0; i
< n_basic_blocks
; i
++)
925 if (BLOCK_INFO (BASIC_BLOCK (i
))->frequency
> freq_max
)
926 freq_max
= BLOCK_INFO (BASIC_BLOCK (i
))->frequency
;
927 for (i
= -2; i
< n_basic_blocks
; i
++)
931 bb
= ENTRY_BLOCK_PTR
;
935 bb
= BASIC_BLOCK (i
);
936 bb
->frequency
= (BLOCK_INFO (bb
)->frequency
* BB_FREQ_MAX
/ freq_max
940 free_aux_for_blocks ();
941 free_aux_for_edges ();