/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "loop.h"
#include "cfgloop.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
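
/* With REG_BR_PROB_BASE at its usual value of 10000, these work out to
   PROB_VERY_UNLIKELY == 999 (just under 10%), PROB_EVEN == 5000 and
   PROB_VERY_LIKELY == 9001.  */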

static bool predicted_by_p (basic_block, enum br_predictor);
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *loop);
static void propagate_freq (struct loop *);
static void estimate_bb_frequencies (struct loops *);
static int counts_to_freqs (void);
static void process_note_predictions (basic_block, int *);
static void process_note_prediction (basic_block, int *, int, int);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
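
/* For example, with REG_BR_PROB_BASE at its usual 10000, HITRATE (79)
   is (79 * 10000 + 50) / 100 == 7900, i.e. a 79% hitrate.  */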

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
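
/* With the default parameter values in params.def (HOT_BB_FREQUENCY_FRACTION
   of 1000 and HOT_BB_COUNT_FRACTION of 10000, at the time of writing), a
   block thus stays maybe-hot unless its frequency drops below
   BB_FREQ_MAX / 1000 or, under profile feedback, its count drops below
   profile_info->sum_max divided by 10000.  */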

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
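
/* For example, with profile data from 10 runs and bb->count == 3, the
   rounded per-run count (3 + 5) / 10 is 0, so the block is considered
   never executed; with bb->count == 5 it would round up to 1.  */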

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

static bool
predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (GET_CODE (insn) == JUMP_INSN
	  && any_condjump_p (insn)
	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, "  exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (rtl_dump_file, " hit ");
	  fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (rtl_dump_file, "\n");
}

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  In the future we will use better
     probability combination techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
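
  /* Worked example of the combination above: starting from the neutral
     50% and folding in a predictor with a 70% hitrate gives
     d = 0.5*0.7 + 0.5*0.3 = 0.5 and 0.5*0.7 / 0.5 = 70%; folding in a
     second, agreeing 70% predictor gives d = 0.7*0.7 + 0.3*0.3 = 0.58
     and 0.7*0.7 / 0.58 ~= 84.5%, so agreeing predictors reinforce each
     other (all values are scaled by REG_BR_PROB_BASE in the code).  */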

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}

/* Statically estimate the probability that a branch will be taken.

   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;
  unsigned i;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct loop_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (simple_loop_p (loop, &desc) && desc.const_iter)
	{
	  int prob;
	  niter = desc.niter + 1;
	  if (niter == 0)	/* We might overflow here.  */
	    niter = desc.niter;

	  prob = (REG_BR_PROB_BASE
		  - (REG_BR_PROB_BASE + niter / 2) / niter);
	  /* The branch prediction algorithm gives 0 frequency for everything
	     after the end of a loop that has 0 probability of finishing.  */
	  if (prob == REG_BR_PROB_BASE)
	    prob = REG_BR_PROB_BASE - 1;
	  predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			prob);
	}
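
      /* For instance, a loop with niter == 100 and the usual
	 REG_BR_PROB_BASE of 10000 gets prob = 10000 - 10050 / 100 = 9900,
	 i.e. the loop branch is predicted taken with 99% probability.  */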

      bbs = get_loop_body (loop);
      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (!can_predict_insn_p (BB_END (bb))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == loop->header
		&& e->src == loop->latch)
	      {
		header_found = 1;
		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
	      }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx cond, earliest;
      edge e;

      if (! can_predict_insn_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and the others are often used for fast
	     paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (e->dest->succ && !e->dest->succ->succ_next
		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == CALL_INSN
		    /* Constant and pure calls are rarely used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}

      cond = get_condition (last_insn, &earliest, false);
      if (! cond)
	continue;

      /* Try "pointer heuristic."
	 A comparison ptr == 0 is predicted as false.
	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
	  && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	      || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
	{
	  if (GET_CODE (cond) == EQ)
	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
	  else if (GET_CODE (cond) == NE)
	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
	}
      else
	/* Try "opcode heuristic."
	   EQ tests are usually false and NE tests are usually true.  Also,
	   most quantities are positive, so we can make the appropriate guesses
	   about signed comparisons against zero.  */
	switch (GET_CODE (cond))
	  {
	  case CONST_INT:
	    /* Unconditional branch.  */
	    predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			      cond == const0_rtx ? NOT_TAKEN : TAKEN);
	    break;

	  case EQ:
	  case UNEQ:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	    break;

	  case NE:
	  case LTGT:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	    break;

	  case ORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	    break;

	  case UNORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	    break;

	  case LE:
	  case LT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	    break;

	  case GE:
	  case GT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	    break;

	  default:
	    break;
	  }
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    if (GET_CODE (BB_END (bb)) == JUMP_INSN
	&& any_condjump_p (BB_END (bb))
	&& bb->succ->succ_next != NULL)
      combine_predictions_for_insn (BB_END (bb), bb);

  free_dominance_info (CDI_POST_DOMINATORS);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
}

/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */
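
/* For instance, source such as

       if (__builtin_expect (ptr != NULL, 1))
	 do_something (ptr);

   (do_something being an arbitrary placeholder) leaves behind a
   NOTE_INSN_EXPECTED_VALUE note whose NOTE_EXPECTED_VALUE is roughly of
   the form (eq (reg) (const_int 1)); the walk below matches that register
   against the condition of a following conditional jump and turns the
   result into a PRED_BUILTIN_EXPECT prediction.  */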

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}

/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && bb->succ && !bb->succ->succ_next
	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* Sets branch probabilities according to PREDiction and
   FLAGS.  HEADS[bb->index] should be the index of the basic block in which
   we need to alter branch predictions (i.e. the first of our dominators
   that we do not post-dominate); we fill this information in
   on demand, so -1 may be there in case it was not needed yet.  */

static void
process_note_prediction (basic_block bb, int *heads, int pred, int flags)
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array, so
	 find the first dominator that we do not post-dominate (we are
	 using already known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BB_END (BASIC_BLOCK (y))))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}

/* Gathers NOTE_INSN_PREDICTIONs in the given basic block and turns them
   into branch probabilities.  For a description of the heads array, see
   process_note_prediction.  */

static void
process_note_predictions (basic_block bb, int *heads)
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  for (insn = BB_END (bb); insn;
       was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	{
	  if (was_bb_head)
	    break;
	  else
	    {
	      /* Noreturn calls cause the program to exit, therefore they
		 are always predicted as not taken.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && find_reg_note (insn, REG_NORETURN, NULL))
		contained_noreturn_call = 1;
	      continue;
	    }
	}
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	{
	  int alg = (int) NOTE_PREDICTION_ALG (insn);
	  /* Process single prediction note.  */
	  process_note_prediction (bb,
				   heads,
				   alg, (int) NOTE_PREDICTION_FLAGS (insn));
	  delete_insn (insn);
	}
    }
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call)
    {
      /* This block ended for reasons other than a return.  If it is
	 because of a noreturn call, this should certainly not be taken.
	 Otherwise it is probably some error recovery.  */
      process_note_prediction (bb, heads, PRED_NORETURN, NOT_TAKEN);
    }
}

/* Gathers NOTE_INSN_PREDICTIONs and turns them into
   branch probabilities.  */

void
note_prediction_to_br_prob (void)
{
  basic_block bb;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads);

  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  free (heads);

  remove_fake_edges ();
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  unsigned int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
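
/* So, for example, a back edge probability of 0.9 corresponds to an
   estimated 1 / (1 - 0.9) = 10 iterations of the loop.  */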

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				  * BLOCK_INFO (e->src)->frequency /
				  REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
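
	      /* E.g. if the back edges into this header carry a combined
		 probability of 0.9, the frequency collected from the entry
		 edges is multiplied by 1 / (1 - 0.9) = 10, matching an
		 expected ten executions of the loop body per entry.  */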
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    sreal tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		  = ((e->probability * BLOCK_INFO (bb)->frequency)
		     / REG_BR_PROB_BASE); */

	    sreal_init (&tmp, e->probability, 0);
	    sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	    sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		       &tmp, &real_inv_br_prob_base);
	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for dummy function loop.  */
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
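
/* E.g. with BB_FREQ_MAX at its usual 10000, a block executed 500 times
   in a function whose most frequent block ran 1000 times gets
   (500 * 10000 + 500) / 1000 == 5000, i.e. half of BB_FREQ_MAX.  */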

/* Return true if the function is likely to be expensive, so there is no
   point in optimizing the performance of the prologue and epilogue or in
   inlining at the expense of code size growth.  THRESHOLD is the maximum
   average number of instructions the function may execute and still be
   considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}

/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();
      /* Fill in the probability values in flowgraph based on the REG_BR_PROB
	 notes.  */
      FOR_EACH_BB (bb)
	{
	  rtx last_insn = BB_END (bb);

	  if (!can_predict_insn_p (last_insn))
	    {
	      /* We can predict only conditional jumps at the moment.
		 Expect each edge to be equally probable.
		 ?? In the future we want to make abnormal edges improbable.  */
	      int nedges = 0;
	      edge e;

	      for (e = bb->succ; e; e = e->succ_next)
		{
		  nedges++;
		  if (e->probability != 0)
		    break;
		}
	      if (!e)
		for (e = bb->succ; e; e = e->succ_next)
		  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
	    }
	}

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}