/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Rough probability guesses, given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
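
/* For example (with REG_BR_PROB_BASE of 10000, its usual value), HITRATE
   turns a hitrate given in percent into the fixed-point probability
   representation used throughout this file:
   HITRATE (50) == 5000, HITRATE (75) == 7500, HITRATE (99) == 9900.  */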

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (!profile_info || !flag_branch_probabilities)
    {
      if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
	return true;
    }
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which is predicted right slightly over 75% of the time.
   It is however notoriously poor on predicting the probability itself.
   In general the profile appears a lot flatter (with probabilities closer
   to 50%) than the reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number of iterations heuristics are predicted well.  This
   predicate should be able to distinguish those, but at the moment it simply
   checks for the noreturn heuristic, which is the only one giving probability
   over 99% or below 1%.  In the future we might want to propagate reliability
   information across the CFG if we find this information useful in multiple
   places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
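
/* Concretely (with REG_BR_PROB_BASE of 10000), a guessed probability is
   trusted only when it is at most HITRATE (1) == 100 or at least
   HITRATE (99) == 9900, i.e. within 1% of certainty.  */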

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    *prediction = (*prediction)->ep_next;
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We can not predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
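
/* For example (with REG_BR_PROB_BASE of 10000), a block with three non-EH,
   non-fake successors gets (10000 + 1) / 3 == 3333 on each of them,
   i.e. roughly a third per edge.  */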

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
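
/* Worked illustration of the combination rule above (with REG_BR_PROB_BASE
   of 10000): starting from the even 5000, a single note predicting 8000
   combines to exactly 8000; a second, agreeing note of 7000 then gives
   d = 8000*7000 + 2000*3000 == 62000000 and
   8000 * 7000 * 10000 / d + 0.5 ~= 9032, so two agreeing predictors yield
   a stronger prediction than either one alone.  */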

/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  scev_initialize ();

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      for (j = 0; VEC_iterate (edge, exits, j, ex); j++)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (host_integerp (niter, 1)
		  && compare_tree_int (niter, max - 1) == -1)
		nitercst = tree_low_cst (niter, 1) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_loop_iterations_int (loop, false);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;
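
	  /* The exit edge is predicted taken once per predicted iteration:
	     e.g. (with REG_BR_PROB_BASE of 10000) a loop expected to iterate
	     10 times gets (10000 + 5) / 10 == 1000, i.e. a ~10% chance of
	     leaving the loop at each test.  */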
	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
	}
      VEC_free (edge, heap, exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For a loop with many exits we don't want to predict all exits
		 with the pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by the number
		 of exits is very rough.  It should compute the number of
		 exits taken in each path through the function (not the
		 overall number of exits that might be a lot higher for loops
		 with wide switch statements in them) and compute the n-th
		 root.

		 We limit the minimal probability to 2% to avoid
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
		 as this was causing regression in the perl benchmark
		 containing such a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  scev_finalize ();
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
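
/* As an example of the opcode heuristic above: a branch on x < 0 for an
   integer x is predicted not taken (PRED_OPCODE_POSITIVE) because most
   quantities tend to be non-negative, while a branch on ptr != 0 is
   predicted taken by the pointer heuristic.  */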

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant EXPR will likely have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guessing (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (GIMPLE_STMT_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree val;

	  if (call_expr_nargs (expr) != 2)
	    return NULL;
	  val = CALL_EXPR_ARG (expr, 0);
	  if (TREE_CONSTANT (val))
	    return val;
	  return CALL_EXPR_ARG (expr, 1);
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
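
/* For example, source code such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   reaches expr_expected_value as a comparison whose operand involves the
   BUILT_IN_EXPECT call; the call's second argument (1) is taken as the
   expected value, the comparison folds to a constant, and the branch can
   then be predicted by PRED_BUILTIN_EXPECT in tree_predict_by_opcode.  */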

/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree call;

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && (call = GIMPLE_STMT_OPERAND (stmt, 1))
	      && TREE_CODE (call) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (call))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (call) == 2)
	    {
	      GIMPLE_STMT_OPERAND (stmt, 1) = CALL_EXPR_ARG (call, 0);
	      update_stmt (stmt);
	    }
	}
    }
}

/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Try to guess whether the return value signals an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}

/* Find the basic block with the return expression and look for a possible
   return value, trying to apply the RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == GIMPLE_MODIFY_STMT)
    return_val = GIMPLE_STMT_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XCNEWVEC (int, last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  tree decl;
	  switch (TREE_CODE (stmt))
	    {
	    case GIMPLE_MODIFY_STMT:
	      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = GIMPLE_STMT_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = get_callee_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, heads, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}
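
/* For example, given a declaration such as

     void fatal_error (const char *msg) __attribute__ ((noreturn, cold));

   every path that must lead to a call of fatal_error is predicted not
   taken, by the PRED_NORETURN and PRED_COLD_FUNCTION heuristics above.  */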

/* Predict branch probabilities and estimate profile of the tree CFG.  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;

  loop_optimizer_init (0);
  if (current_loops && dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops ();
  record_loop_exits ();
  if (current_loops)
    predict_loops ();

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.

	     Since we've already removed the return statements, we are
	     looking for a CFG like:

	       if (conditional)
		 {
		   ...
		   goto return_block;
		 }
	       some other blocks
	     return_block:
	       return_stmt.  */
	  if (e->dest != bb->next_bb
	      && e->dest != EXIT_BLOCK_PTR
	      && single_succ_p (e->dest)
	      && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (e->dest)) == RETURN_EXPR)
	    {
	      edge e1;
	      edge_iterator ei1;

	      if (single_succ_p (bb))
		{
		  FOR_EACH_EDGE (e1, ei1, bb->preds)
		    if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
			&& !predicted_by_p (e1->src, PRED_CONST_RETURN)
			&& !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		      predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
		}
	      else
		if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
			   && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1))
			      == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies ();
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}

/* Sets branch probabilities according to PREDiction and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so ENTRY_BLOCK may be there in case this was not needed
   yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is first time we need this field in heads array; so
	 find first dominator that we do not post-dominate (we are
	 using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached provided the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
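
/* For instance, a back edge whose back_edge_prob works out to 0.9 implies
   an expected 1 / (1 - 0.9) == 10 iterations of its loop.  */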

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we can not
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}

/* Propagates frequencies through structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (current_loops)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB (bb)
    {
      bitmap_set_bit (tovisit, bb->index);
    }
  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
  BITMAP_FREE (tovisit);
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static gcov_type
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
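
/* For example, a block executed 500 times in a function whose hottest
   block ran 1000 times gets frequency (500 * BB_FREQ_MAX + 500) / 1000,
   i.e. roughly BB_FREQ_MAX / 2.  */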

/* Return true if function is likely to be expensive, so there is no point to
   optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit of number of instructions
   function can execute on average to be still considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that function contains
     internal loop executing more than BB_FREQ_MAX times or profile feedback
     is available and function has not been executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
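
/* For instance, expensive_function_p (20) answers whether the function is
   expected to execute more than about 20 active insns per invocation: the
   sum of frequencies of active insns is compared against 20 times the
   entry block frequency.  */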

/* Estimate basic blocks frequency by given branch probabilities.  */

void
estimate_bb_frequencies (void)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    {
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
      return;
    }
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}

static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};