contrib/gcc-3.4/gcc/predict.c
/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "loop.h"
#include "cfgloop.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
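/* For intuition, assuming the usual REG_BR_PROB_BASE of 10000 from rtl.h,
   these work out to PROB_VERY_UNLIKELY == 99 (about 1%), PROB_EVEN == 5000
   (50%), PROB_VERY_LIKELY == 9901 (about 99%) and PROB_ALWAYS == 10000.  */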
static bool predicted_by_p (basic_block, enum br_predictor);
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *loop);
static void propagate_freq (struct loop *);
static void estimate_bb_frequencies (struct loops *);
static void counts_to_freqs (void);
static void process_note_predictions (basic_block, int *);
static void process_note_prediction (basic_block, int *, int, int);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hit rate given in percent into our fixed-point representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
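/* Worked example, assuming REG_BR_PROB_BASE is 10000: HITRATE (79) expands
   to ((int) (79 * 10000 + 50) / 100) == 7900, i.e. a 79% hit rate scaled
   into the fixed-point probability range; the + 50 rounds the division to
   nearest should REG_BR_PROB_BASE not be a multiple of 100.  */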
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
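/* The rounding above means a block is treated as never executed when its
   total count over all runs is less than half the number of runs; e.g.
   with profile_info->runs == 10, any block counted fewer than 5 times in
   total rounds down to 0.  */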
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

static bool
predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (GET_CODE (insn) == JUMP_INSN
	  && any_condjump_p (insn)
	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, "  exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (rtl_dump_file, " hit ");
	  fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (rtl_dump_file, "\n");
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now-useless REG_BR_PRED
   notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  In the future we will use
     better probability combination techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
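  /* To see the combination rule in action (REG_BR_PROB_BASE being 10000):
     combining an accumulated probability of 8000 (80%) with a new note of
     7000 (70%) gives d = 8000 * 7000 + 2000 * 3000 == 62000000 and a
     combined probability of 8000.0 * 7000 * 10000 / 62000000 + 0.5 == 9032,
     about 90% -- two agreeing predictors reinforce each other, as in
     Dempster-Shafer evidence combination.  */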
  /* Decide which heuristic to use.  If we didn't match anything, use the
     no_prediction heuristic; if we did match, use either first match or
     Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
	 non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}

/* Statically estimate the probability that a branch will be taken.
   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;
  unsigned i;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct loop_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (simple_loop_p (loop, &desc) && desc.const_iter)
	{
	  int prob;
	  niter = desc.niter + 1;
	  if (niter == 0)	/* We might overflow here.  */
	    niter = desc.niter;

	  prob = (REG_BR_PROB_BASE
		  - (REG_BR_PROB_BASE + niter / 2) / niter);
	  /* The branch prediction algorithm gives 0 frequency for everything
	     after the end of a loop that has 0 probability of finishing.  */
	  if (prob == REG_BR_PROB_BASE)
	    prob = REG_BR_PROB_BASE - 1;
	  predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			prob);
	}
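      /* A worked instance of the formula above (REG_BR_PROB_BASE being
	 10000): for niter == 100, prob = 10000 - (10000 + 50) / 100 == 9900,
	 i.e. the loop branch is predicted taken with 99% probability.  */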
      bbs = get_loop_body (loop);
      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (!can_predict_insn_p (BB_END (bb))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == loop->header
		&& e->src == loop->latch)
	      {
		header_found = 1;
		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
	      }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx cond, earliest;
      edge e;

      if (! can_predict_insn_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and the others are often used for fast
	     paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (e->dest->succ && !e->dest->succ->succ_next
		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == CALL_INSN
		    /* Constant and pure calls are hardly ever used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}

      cond = get_condition (last_insn, &earliest, false);
      if (! cond)
	continue;

      /* Try "pointer heuristic."
	 A comparison ptr == 0 is predicted as false.
	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
	  && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	      || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
	{
	  if (GET_CODE (cond) == EQ)
	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
	  else if (GET_CODE (cond) == NE)
	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
	}
      else
	/* Try "opcode heuristic."
	   EQ tests are usually false and NE tests are usually true.  Also,
	   most quantities are positive, so we can make the appropriate guesses
	   about signed comparisons against zero.  */
	switch (GET_CODE (cond))
	  {
	  case CONST_INT:
	    /* Unconditional branch.  */
	    predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			      cond == const0_rtx ? NOT_TAKEN : TAKEN);
	    break;

	  case EQ:
	  case UNEQ:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	    break;

	  case NE:
	  case LTGT:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of = tests in
	       FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	    break;

	  case ORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	    break;

	  case UNORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	    break;

	  case LE:
	  case LT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	    break;

	  case GE:
	  case GT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	    break;

	  default:
	    break;
	  }
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    if (GET_CODE (BB_END (bb)) == JUMP_INSN
	&& any_condjump_p (BB_END (bb))
	&& bb->succ->succ_next != NULL)
      combine_predictions_for_insn (BB_END (bb), bb);

  free_dominance_info (CDI_POST_DOMINATORS);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
}

/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */
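/* For orientation, a sketch of the source-level idiom this consumes (the
   user code below is hypothetical, and the exact RTL produced is target
   dependent):

       if (__builtin_expect (ptr == NULL, 0))
	 handle_error ();

   The builtin leaves a NOTE_INSN_EXPECTED_VALUE note recording that the
   register feeding the comparison is expected to be 0; the loop below
   matches that note against the conditional jump and issues a
   PRED_BUILTIN_EXPECT prediction of NOT_TAKEN for the error path.  */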
void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}

/* Check whether this is the last basic block of a function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && bb->succ && !bb->succ->succ_next
	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* Set branch probabilities according to PREDiction and FLAGS.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it); we fill this information in
   on demand, so -1 may be there in case it was not needed yet.  */

static void
process_note_prediction (basic_block bb, int *heads, int pred, int flags)
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array, so
	 find the first dominator that we do not post-dominate (using the
	 already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BB_END (BASIC_BLOCK (y))))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}

/* Gathers NOTE_INSN_PREDICTIONs in given basic block and turns them
   into branch probabilities.  For a description of the heads array, see
   process_note_prediction.  */

static void
process_note_predictions (basic_block bb, int *heads)
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  for (insn = BB_END (bb); insn;
       was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	{
	  if (was_bb_head)
	    break;
	  else
	    {
	      /* Noreturn calls cause the program to exit, therefore they
		 are always predicted as not taken.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && find_reg_note (insn, REG_NORETURN, NULL))
		contained_noreturn_call = 1;
	      continue;
	    }
	}
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	{
	  int alg = (int) NOTE_PREDICTION_ALG (insn);
	  /* Process single prediction note.  */
	  process_note_prediction (bb,
				   heads,
				   alg, (int) NOTE_PREDICTION_FLAGS (insn));
	  delete_insn (insn);
	}
    }
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call)
    {
      /* This block ended for reasons other than a return.  If it is
	 because of a noreturn call, this should certainly not be taken.
	 Otherwise it is probably some error recovery.  */
      process_note_prediction (bb, heads, PRED_NORETURN, NOT_TAKEN);
    }
}

/* Gathers NOTE_INSN_PREDICTIONs and turns them into
   branch probabilities.  */

void
note_prediction_to_br_prob (void)
{
  basic_block bb;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads);

  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  free (heads);

  remove_fake_edges ();
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  unsigned int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached provided the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
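/* A quick check of the iteration formula above: if the loopback edge is
   reached with probability 0.9 whenever the header is, the expected
   iteration count is 1 / (1 - 0.9) == 10 executions of the loop body per
   entry into the loop.  */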
#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    sreal tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		 = ((e->probability * BLOCK_INFO (bb)->frequency)
		    / REG_BR_PROB_BASE);  */

	    sreal_init (&tmp, e->probability, 0);
	    sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	    sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		       &tmp, &real_inv_br_prob_base);
	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at the same nest
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for the dummy function loop.  */
	{
	  /* Find the current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.  */

static void
counts_to_freqs (void)
{
  gcov_type count_max = 1;
  basic_block bb;

  FOR_EACH_BB (bb)
    count_max = MAX (bb->count, count_max);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}
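/* A worked instance of the scaling above, assuming BB_FREQ_MAX is 10000
   (its value in basic-block.h): if the hottest block was executed 1000
   times, a block executed 250 times gets frequency
   (250 * 10000 + 500) / 1000 == 2500, a quarter of BB_FREQ_MAX, with the
   + count_max / 2 term rounding the division to nearest.  */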
/* Return true if the function is likely to be expensive, so there is no
   point in optimizing performance of the prologue or epilogue, or in doing
   inlining at the expense of code size growth.  THRESHOLD is the limit of
   the number of instructions the function can execute on average to still
   be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     that profile feedback is available and the function has not been
     executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (flag_branch_probabilities)
    counts_to_freqs ();
  else
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();
      /* Fill in the probability values in the flowgraph based on the
	 REG_BR_PROB notes.  */
      FOR_EACH_BB (bb)
	{
	  rtx last_insn = BB_END (bb);

	  if (!can_predict_insn_p (last_insn))
	    {
	      /* We can predict only conditional jumps at the moment.
		 Expect each edge to be equally probable.
		 ?? In the future we want to make abnormal edges improbable.  */
	      int nedges = 0;
	      edge e;

	      for (e = bb->succ; e; e = e->succ_next)
		{
		  nedges++;
		  if (e->probability != 0)
		    break;
		}
	      if (!e)
		for (e = bb->succ; e; e = e->succ_next)
		  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
	    }
	}

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose the appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}