gcc/predict.c
/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "profile.h"
#include "real.h"
#include "params.h"
#include "target.h"
#include "loop.h"
#include "cfgloop.h"

/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static REAL_VALUE_TYPE real_zero, real_one, real_almost_one, real_br_prob_base,
                       real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY  (REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN           (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY    (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS         (REG_BR_PROB_BASE)

static bool predicted_by_p PARAMS ((basic_block,
                                    enum br_predictor));
static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
static void dump_prediction PARAMS ((enum br_predictor, int,
                                     basic_block, int));
static void estimate_loops_at_level PARAMS ((struct loop *loop));
static void propagate_freq PARAMS ((struct loop *));
static void estimate_bb_frequencies PARAMS ((struct loops *));
static void counts_to_freqs PARAMS ((void));
static void process_note_predictions PARAMS ((basic_block, int *,
                                              dominance_info,
                                              dominance_info));
static void process_note_prediction PARAMS ((basic_block, int *,
                                             dominance_info,
                                             dominance_info, int, int));
static bool last_basic_block_p PARAMS ((basic_block));
static void compute_function_frequency PARAMS ((void));
static void choose_function_section PARAMS ((void));
static bool can_predict_insn_p PARAMS ((rtx));

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;   /* Name used in the debugging dumps.  */
  const int hitrate;        /* Expected hitrate used by
                               predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute a hitrate given in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
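
/* For instance, with REG_BR_PROB_BASE at its usual value of 10000,
   HITRATE (79) yields 7900, i.e. a predicted hit rate of 79%; the + 50
   term rounds to the nearest representable value.  */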

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities
      && (bb->count
          < profile_info.max_counter_in_program
          / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
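
/* Both this predicate and probably_cold_bb_p below use the same thresholds:
   with profile feedback a block needs a count of at least
   max_counter_in_program / HOT_BB_COUNT_FRACTION, and in any case a
   frequency of at least BB_FREQ_MAX / HOT_BB_FREQUENCY_FRACTION, to be
   considered hot.  */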

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities
      && (bb->count
          < profile_info.max_counter_in_program
          / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities)
    return ((bb->count + profile_info.count_profiles_merged / 2)
            / profile_info.count_profiles_merged) == 0;
  return false;
}
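
/* Note that BB->count accumulates executions over all merged profile runs,
   so the test above rounds the average number of executions per run to the
   nearest integer and treats the block as never executed when that average
   is zero.  */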

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

static bool
predicted_by_p (bb, predictor)
     basic_block bb;
     enum br_predictor predictor;
{
  rtx note;
  if (!INSN_P (bb->end))
    return false;
  for (note = REG_NOTES (bb->end); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

void
predict_insn (insn, predictor, probability)
     rtx insn;
     int probability;
     enum br_predictor predictor;
{
  if (!any_condjump_p (insn))
    abort ();

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (insn, predictor, taken)
     rtx insn;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
predict_edge (e, predictor, probability)
     edge e;
     int probability;
     enum br_predictor predictor;
{
  rtx last_insn;
  last_insn = e->src->end;

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (insn)
     rtx insn;
{
  return (GET_CODE (insn) == JUMP_INSN
          && any_condjump_p (insn)
          && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (e, predictor, taken)
     edge e;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (insn)
     rtx insn;
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (predictor, probability, bb, used)
     enum br_predictor predictor;
     int probability;
     basic_block bb;
     int used;
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, "  exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (rtl_dump_file, " hit ");
          fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (rtl_dump_file, "\n");
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now useless REG_BR_PRED
   notes.  */

static void
combine_predictions_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  In the future we will use
     better probability combination techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;
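
        /* The note's probability is folded into COMBINED_PROBABILITY using
           the Dempster-Shafer style combination

             combined' = (combined * prob)
                         / (combined * prob + (1 - combined) * (1 - prob)),

           with all probabilities scaled by REG_BR_PROB_BASE; D below is the
           denominator of this expression.  */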

        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32-bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing a non-degenerate
         conditional jump.  */
      if (bb->succ->succ_next)
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
}

/* Statically estimate the probability that a branch will be taken.
   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (loops_info)
     struct loops *loops_info;
{
  dominance_info dominators, post_dominators;
  basic_block bb;
  unsigned i;

  connect_infinite_loops_to_exit ();
  dominators = calculate_dominance_info (CDI_DOMINATORS);
  post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct loop_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loops_info, loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (simple_loop_p (loops_info, loop, &desc)
          && desc.const_iter)
        {
          niter = desc.niter + 1;
          if (niter == 0)        /* We might overflow here.  */
            niter = desc.niter;
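
          /* The value below is REG_BR_PROB_BASE
             - (REG_BR_PROB_BASE + niter / 2) / niter, i.e. roughly
             REG_BR_PROB_BASE * (1 - 1/NITER): a loop known to iterate NITER
             times is predicted to continue iterating with probability about
             (NITER - 1) / NITER.  */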
          predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
                        REG_BR_PROB_BASE
                        - (REG_BR_PROB_BASE + niter / 2)
                        / niter);
        }

      bbs = get_loop_body (loop);
      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better handled
             separately.  */
          if (!can_predict_insn_p (bb->end)
              || predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          for (e = bb->succ; e; e = e->succ_next)
            if (e->dest == loop->header
                && e->src == loop->latch)
              {
                header_found = 1;
                predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
              }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successors as not taken.  */
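          /* The not-taken share of PRED_LOOP_EXIT is divided evenly among
             the EXITS exit edges, so the overall likelihood of leaving the
             loop stays roughly the same however many exits it has.  */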
          if (!header_found)
            for (e = bb->succ; e; e = e->succ_next)
              if (e->dest->index < 0
                  || !flow_bb_inside_loop_p (loop, e->dest))
                predict_edge
                  (e, PRED_LOOP_EXIT,
                   (REG_BR_PROB_BASE
                    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                   / exits);
        }
    }

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = bb->end;
      rtx cond, earliest;
      edge e;

      if (! can_predict_insn_p (last_insn))
        continue;

      for (e = bb->succ; e; e = e->succ_next)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and the others are often used for fast
             paths through the function.  */
          if ((e->dest == EXIT_BLOCK_PTR
               || (e->dest->succ && !e->dest->succ->succ_next
                   && e->dest->succ->dest == EXIT_BLOCK_PTR))
              && !predicted_by_p (bb, PRED_NULL_RETURN)
              && !predicted_by_p (bb, PRED_CONST_RETURN)
              && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
              && !last_basic_block_p (e->dest))
            predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

          /* Look for block we are guarding (i.e. we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (dominators, e->dest, e->src)
              && !dominated_by_p (post_dominators, e->src, e->dest))
            {
              rtx insn;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
                   insn = NEXT_INSN (insn))
                if (GET_CODE (insn) == CALL_INSN
                    /* Constant and pure calls are hardly ever used to signal
                       something exceptional.  */
                    && ! CONST_OR_PURE_CALL_P (insn))
                  {
                    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                    break;
                  }
            }
        }

      cond = get_condition (last_insn, &earliest);
      if (! cond)
        continue;

      /* Try "pointer heuristic."
         A comparison ptr == 0 is predicted as false.
         Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
          && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
              || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
        {
          if (GET_CODE (cond) == EQ)
            predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
          else if (GET_CODE (cond) == NE)
            predict_insn_def (last_insn, PRED_POINTER, TAKEN);
        }
      else

      /* Try "opcode heuristic."
         EQ tests are usually false and NE tests are usually true.  Also,
         most quantities are positive, so we can make the appropriate guesses
         about signed comparisons against zero.  */
        switch (GET_CODE (cond))
          {
          case CONST_INT:
            /* Unconditional branch.  */
            predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                              cond == const0_rtx ? NOT_TAKEN : TAKEN);
            break;

          case EQ:
          case UNEQ:
            /* Floating point comparisons appear to behave in a very
               unpredictable way because of the special role of = tests in
               FP code.  */
            if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
              ;
            /* Comparisons with 0 are often used for booleans and there is
               nothing useful to predict about them.  */
            else if (XEXP (cond, 1) == const0_rtx
                     || XEXP (cond, 0) == const0_rtx)
              ;
            else
              predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
            break;

          case NE:
          case LTGT:
            /* Floating point comparisons appear to behave in a very
               unpredictable way because of the special role of = tests in
               FP code.  */
            if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
              ;
            /* Comparisons with 0 are often used for booleans and there is
               nothing useful to predict about them.  */
            else if (XEXP (cond, 1) == const0_rtx
                     || XEXP (cond, 0) == const0_rtx)
              ;
            else
              predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
            break;

          case ORDERED:
            predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
            break;

          case UNORDERED:
            predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
            break;

          case LE:
          case LT:
            if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
                || XEXP (cond, 1) == constm1_rtx)
              predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
            break;

          case GE:
          case GT:
            if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
                || XEXP (cond, 1) == constm1_rtx)
              predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
            break;

          default:
            break;
          }
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    if (GET_CODE (bb->end) == JUMP_INSN
        && any_condjump_p (bb->end)
        && bb->succ->succ_next != NULL)
      combine_predictions_for_insn (bb->end, bb);

  free_dominance_info (post_dominators);
  free_dominance_info (dominators);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
}

/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */
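
/* Roughly speaking, for `if (__builtin_expect (x, 0))' the note records
   that the register holding X is expected to compare equal to 0; the loop
   below matches such notes against the next conditional branch testing
   that register.  */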

void
expected_value_to_br_prob ()
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
          /* Look for expected value notes.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
            {
              ev = NOTE_EXPECTED_VALUE (insn);
              ev_reg = XEXP (ev, 0);
              delete_insn (insn);
            }
          continue;

        case CODE_LABEL:
          /* Never propagate across labels.  */
          ev = NULL_RTX;
          continue;

        case JUMP_INSN:
          /* Look for simple conditional branches.  If we haven't got an
             expected value yet, no point going further.  */
          if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
              || ! any_condjump_p (insn))
            continue;
          break;

        default:
          /* Look for insns that clobber the EV register.  */
          if (ev && reg_set_p (ev_reg, insn))
            ev = NULL_RTX;
          continue;
        }

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
                (expected_value (eq r70 0))
                (set r71 -1)
                (set r80 (lt r70 r71))
                (set pc (if_then_else (ne r80 0) ...))
         as canonicalize_condition will render this to us as
                (lt r70, r71)
         Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg);
      if (! cond || XEXP (cond, 0) != ev_reg
          || GET_CODE (XEXP (cond, 1)) != CONST_INT)
        continue;

      /* Substitute and simplify.  Given that the expression we're
         building involves two constants, we should wind up with either
         true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
                             XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
        abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}

/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (bb)
     basic_block bb;
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
              && bb->succ && !bb->succ->succ_next
              && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* Set branch probabilities according to PRED and FLAGS.  HEADS[bb->index]
   should be the index of the basic block in which we need to alter the
   branch predictions (i.e. the first of our dominators such that we do not
   post-dominate it); we fill this information in on demand, so -1 may be
   there when it has not been needed yet.  */

static void
process_note_prediction (bb, heads, dominators, post_dominators, pred, flags)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
     int pred;
     int flags;
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array, so
         find the first dominator that we do not post-dominate (we are
         using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (dominators, bb);
      int head;

      while (heads[next_ai->index] < 0)
        {
          if (!dominated_by_p (post_dominators, next_ai, bb))
            break;
          heads[next_ai->index] = ai->index;
          ai = next_ai;
          next_ai = get_immediate_dominator (dominators, next_ai);
        }
      if (!dominated_by_p (post_dominators, next_ai, bb))
        head = next_ai->index;
      else
        head = heads[next_ai->index];
      while (next_ai != bb)
        {
          next_ai = ai;
          if (heads[ai->index] == ENTRY_BLOCK)
            ai = ENTRY_BLOCK_PTR;
          else
            ai = BASIC_BLOCK (heads[ai->index]);
          heads[next_ai->index] = head;
        }
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BASIC_BLOCK (y)->end))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
        && dominated_by_p (post_dominators, e->dest, bb))
      predict_edge_def (e, pred, taken);
}

/* Gathers NOTE_INSN_PREDICTIONs in given basic block and turns them
   into branch probabilities.  For a description of the heads array, see
   process_note_prediction.  */

static void
process_note_predictions (bb, heads, dominators, post_dominators)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  for (insn = bb->end; insn;
       was_bb_head |= (insn == bb->head), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
        {
          if (was_bb_head)
            break;
          else
            {
              /* Noreturn calls cause the program to exit, therefore they are
                 always predicted as not taken.  */
              if (GET_CODE (insn) == CALL_INSN
                  && find_reg_note (insn, REG_NORETURN, NULL))
                contained_noreturn_call = 1;
              continue;
            }
        }
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
        {
          int alg = (int) NOTE_PREDICTION_ALG (insn);
          /* Process single prediction note.  */
          process_note_prediction (bb,
                                   heads,
                                   dominators,
                                   post_dominators,
                                   alg, (int) NOTE_PREDICTION_FLAGS (insn));
          delete_insn (insn);
        }
    }
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call)
    {
      /* This block ended for reasons other than a return.  If it is because
         of a noreturn call, the branch should certainly not be taken.
         Otherwise it is probably some error recovery.  */
      process_note_prediction (bb,
                               heads,
                               dominators,
                               post_dominators, PRED_NORETURN, NOT_TAKEN);
    }
}

/* Gathers NOTE_INSN_PREDICTIONs and turns them into
   branch probabilities.  */

void
note_prediction_to_br_prob ()
{
  basic_block bb;
  dominance_info post_dominators, dominators;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
  dominators = calculate_dominance_info (CDI_DOMINATORS);

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads, dominators, post_dominators);

  free_dominance_info (post_dominators);
  free_dominance_info (dominators);
  free (heads);

  remove_fake_edges ();
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  REAL_VALUE_TYPE frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* If the edge is a loopback edge, the probability that the edge will be
     reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
  REAL_VALUE_TYPE back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)  ((block_info) (B)->aux)
#define EDGE_INFO(E)   ((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (loop)
     struct loop *loop;
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block that we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
        {
          int count = 0;

          for (e = bb->pred; e; e = e->pred_next)
            if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
              count++;
            else if (BLOCK_INFO (e->src)->tovisit
                     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
              fprintf (rtl_dump_file,
                       "Irreducible region hit, ignoring edge to %i->%i\n",
                       e->src->index, bb->index);
          BLOCK_INFO (bb)->npredecessors = count;
        }
    }
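
  /* Blocks are processed in topological order ignoring back edges: the loop
     header starts with frequency 1, and each other block is queued once all
     of its non-back-edge predecessors have been visited.  */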
  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      REAL_VALUE_TYPE cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          for (e = bb->pred; e; e = e->pred_next)
            if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
              abort ();
#endif

          for (e = bb->pred; e; e = e->pred_next)
            if (EDGE_INFO (e)->back_edge)
              {
                REAL_ARITHMETIC (cyclic_probability, PLUS_EXPR,
                                 cyclic_probability,
                                 EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                REAL_VALUE_TYPE tmp;

                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency
                                 / REG_BR_PROB_BASE);  */

                REAL_VALUE_FROM_INT (tmp, e->probability, 0,
                                     TYPE_MODE (double_type_node));
                REAL_ARITHMETIC (tmp, MULT_EXPR, tmp,
                                 BLOCK_INFO (e->src)->frequency);
                REAL_ARITHMETIC (tmp, MULT_EXPR, tmp, real_inv_br_prob_base);
                REAL_ARITHMETIC (frequency, PLUS_EXPR, frequency, tmp);
              }

          if (REAL_VALUES_IDENTICAL (cyclic_probability, real_zero))
            memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                    sizeof (frequency));
          else
            {
              if (REAL_VALUES_LESS (real_almost_one, cyclic_probability))
                memcpy (&cyclic_probability, &real_almost_one,
                        sizeof (real_zero));
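
              /* CYCLIC_PROBABILITY sums the probabilities of the marked
                 back edges into BB, so dividing by (1 - cyclic_probability)
                 below scales the frequency by the expected number of
                 iterations of the inner loop headed by BB; the value is
                 capped just below 1 to keep the division finite.  */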

              /* BLOCK_INFO (bb)->frequency
                   = frequency / (1 - cyclic_probability)  */

              REAL_ARITHMETIC (cyclic_probability, MINUS_EXPR, real_one,
                               cyclic_probability);
              REAL_ARITHMETIC (BLOCK_INFO (bb)->frequency,
                               RDIV_EXPR, frequency, cyclic_probability);
            }
        }

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (e->dest == head)
          {
            REAL_VALUE_TYPE tmp;

            /* EDGE_INFO (e)->back_edge_prob
                 = ((e->probability * BLOCK_INFO (bb)->frequency)
                    / REG_BR_PROB_BASE);  */
            REAL_VALUE_FROM_INT (tmp, e->probability, 0,
                                 TYPE_MODE (double_type_node));
            REAL_ARITHMETIC (tmp, MULT_EXPR, tmp,
                             BLOCK_INFO (bb)->frequency);
            REAL_ARITHMETIC (EDGE_INFO (e)->back_edge_prob,
                             MULT_EXPR, tmp, real_inv_br_prob_base);
          }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}

/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (first_loop)
     struct loop *first_loop;
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for dummy function loop.  */
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
        }

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.  */

static void
counts_to_freqs ()
{
  gcov_type count_max = 1;
  basic_block bb;

  FOR_EACH_BB (bb)
    count_max = MAX (bb->count, count_max);
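
  /* The hottest block (count == count_max) gets frequency BB_FREQ_MAX;
     other blocks are scaled linearly, with the + count_max / 2 term
     rounding to the nearest value.  */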
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}

/* Return true if function is likely to be expensive, so there is no point
   in optimizing performance of the prologue, epilogue or doing inlining at
   the expense of code size growth.  THRESHOLD is the limit on the number of
   instructions the function can execute on average to still be considered
   not expensive.  */

bool
expensive_function_p (threshold)
     int threshold;
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
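
  /* Every active insn adds its block's frequency to SUM, so SUM exceeds
     LIMIT exactly when the function is estimated to execute more than
     THRESHOLD active insns per invocation, both sides being scaled by the
     entry block frequency.  */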
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = bb->head; insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}

/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (loops)
     struct loops *loops;
{
  basic_block bb;
  REAL_VALUE_TYPE freq_max;
  enum machine_mode double_mode = TYPE_MODE (double_type_node);

  if (flag_branch_probabilities)
    counts_to_freqs ();
  else
    {
      REAL_VALUE_FROM_INT (real_zero, 0, 0, double_mode);
      REAL_VALUE_FROM_INT (real_one, 1, 0, double_mode);
      REAL_VALUE_FROM_INT (real_br_prob_base, REG_BR_PROB_BASE, 0, double_mode);
      REAL_VALUE_FROM_INT (real_bb_freq_max, BB_FREQ_MAX, 0, double_mode);
      REAL_VALUE_FROM_INT (real_one_half, 2, 0, double_mode);
      REAL_ARITHMETIC (real_one_half, RDIV_EXPR, real_one, real_one_half);
      REAL_ARITHMETIC (real_inv_br_prob_base, RDIV_EXPR, real_one,
                       real_br_prob_base);
      REAL_ARITHMETIC (real_almost_one, MINUS_EXPR, real_one,
                       real_inv_br_prob_base);

      mark_dfs_back_edges ();
      /* Fill in the probability values in flowgraph based on the REG_BR_PROB
         notes.  */
      FOR_EACH_BB (bb)
        {
          rtx last_insn = bb->end;

          if (!can_predict_insn_p (last_insn))
            {
              /* We can predict only conditional jumps at the moment.
                 Expect each edge to be equally probable.
                 ?? In the future we want to make abnormal edges improbable.  */
              int nedges = 0;
              edge e;

              for (e = bb->succ; e; e = e->succ_next)
                {
                  nedges++;
                  if (e->probability != 0)
                    break;
                }
              if (!e)
                for (e = bb->succ; e; e = e->succ_next)
                  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
            }
        }

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;

          BLOCK_INFO (bb)->tovisit = 0;
          for (e = bb->succ; e; e = e->succ_next)
            {
              REAL_VALUE_FROM_INT (EDGE_INFO (e)->back_edge_prob,
                                   e->probability, 0, double_mode);
              REAL_ARITHMETIC (EDGE_INFO (e)->back_edge_prob,
                               MULT_EXPR, EDGE_INFO (e)->back_edge_prob,
                               real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (REAL_VALUES_LESS (freq_max, BLOCK_INFO (bb)->frequency))
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      REAL_ARITHMETIC (freq_max, RDIV_EXPR, real_bb_freq_max, freq_max);
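
      /* FREQ_MAX now holds BB_FREQ_MAX divided by the largest real
         frequency, so the multiplication below rescales every block into
         the range 0 .. BB_FREQ_MAX; adding 0.5 before the unsigned fix
         rounds to the nearest integer.  */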
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          REAL_VALUE_TYPE tmp;

          REAL_ARITHMETIC (tmp, MULT_EXPR, BLOCK_INFO (bb)->frequency,
                           freq_max);
          REAL_ARITHMETIC (tmp, PLUS_EXPR, tmp, real_one_half);
          bb->frequency = REAL_VALUE_UNSIGNED_FIX (tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency ()
{
  basic_block bb;

  if (!profile_info.count_profiles_merged
      || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose appropriate section for the function.  */
static void
choose_function_section ()
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
         but this requires more work as the frequency needs to match
         for all generated objects so we need to merge the frequency
         of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}