/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
#include "pointer-set.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Rough probability estimates, given readable names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (const_rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hit rate given in percent to our fixed-point representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
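/* Illustration (not from the original sources): REG_BR_PROB_BASE is
   10000, so HITRATE (75) == (75 * 10000 + 50) / 100 == 7500, i.e. a
   75% hit rate expressed in REG_BR_PROB_BASE units.  */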
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */
static bool
maybe_hot_frequency_p (int freq)
{
  if (!profile_info || !flag_branch_probabilities)
    {
      if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
	return true;
    }
  if (freq < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  return maybe_hot_frequency_p (bb->frequency);
}
/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_info && flag_branch_probabilities
      && (e->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    return true;
  return false;
}
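/* Illustration (not from the original sources): with profile data from
   profile_info->runs == 100 training runs, a block with bb->count == 49
   averages (49 + 50) / 100 == 0 executions per run and is thus considered
   never executed, while bb->count == 50 rounds up to 1.  */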
/* Return true if one of the outgoing edges of BB is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static struct pointer_map_t *bb_predictions;
/* Return true if one of the outgoing edges of BB is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  void **preds = pointer_map_contains (bb_predictions, bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which it gets right slightly over 75% of the time.
   It is however notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number-of-iterations heuristics are predicted well.
   This function should be able to distinguish those, but at the moment
   it simply checks for the noreturn heuristic, which is the only one
   giving probability over 99% or below 1%.  In the future we might want
   to propagate reliability information across the CFG if we find it
   useful in multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      void **preds = pointer_map_insert (bb_predictions, e->src);

      i->ep_next = *preds;
      *preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  void **preds;

  if (!bb_predictions)
    return;

  preds = pointer_map_contains (bb_predictions, e->src);

  if (preds)
    {
      struct edge_prediction **prediction = (struct edge_prediction **) preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  void **preds = pointer_map_contains (bb_predictions, bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We can not predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
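/* Illustration (not from the original sources): with REG_BR_PROB_BASE
   == 10000 and three non-EH, non-fake successor edges, each edge gets
   (10000 + 1) / 3 == 3333 -- an even split, rounded to nearest by the
   "+ nedges / 2" term.  */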
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
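/* Worked example of the Dempster-Shafer combination above (illustration
   only, not from the original sources): with REG_BR_PROB_BASE == 10000,
   combining predictions of 8000 (80%) and 6000 (60%) gives
   d = 8000 * 6000 + 2000 * 4000 == 56000000 and a combined value of
   8000.0 * 6000 * 10000 / 56000000 + 0.5 == 8571, i.e. two agreeing
   predictors reinforce each other to about 85.7%.  */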
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  void **preds;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  preds = pointer_map_contains (bb_predictions, bb);
  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  int predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  if (best_predictor > predictor)
	    best_probability = probability, best_predictor = predictor;

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32-bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  int predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != EDGE_SUCC (bb, 0))
	    probability = REG_BR_PROB_BASE - probability;
	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	}
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  scev_initialize ();

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      for (j = 0; VEC_iterate (edge, exits, j, ex); j++)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (host_integerp (niter, 1)
		  && compare_tree_int (niter, max - 1) == -1)
		nitercst = tree_low_cst (niter, 1) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_loop_iterations_int (loop, false);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
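	  /* Illustration (not from the original sources): with
	     REG_BR_PROB_BASE == 10000 and nitercst == 10 expected
	     iterations, the exit edge gets (10000 + 5) / 10 == 1000,
	     i.e. a 10% chance of exiting per iteration, so the expected
	     number of iterations matches the estimate.  */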
	}
      VEC_free (edge, heap, exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For loops with many exits we don't want to predict all exits
		 with the pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by the number
		 of exits is very rough.  It should compute the number of
		 exits taken in each path through the function (not the
		 overall number of exits that might be a lot higher for loops
		 with wide switch statements in them) and compute the n-th
		 root.

		 We limit the minimal probability to 2% to keep
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
		 as this was causing a regression in the perl benchmark
		 containing such a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  scev_finalize ();
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant value EXPR is expected to have at execution time,
   or NULL if unknown.  The function is used by the builtin_expect branch
   predictor, so the evidence must come from this construct and additional
   possible constant folding.

   We may want to implement more involved value guessing (such as value
   range propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same expected
	     constant value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (GIMPLE_STMT_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree val;

	  if (call_expr_nargs (expr) != 2)
	    return NULL;
	  val = CALL_EXPR_ARG (expr, 0);
	  if (TREE_CONSTANT (val))
	    return val;
	  return CALL_EXPR_ARG (expr, 1);
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
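/* Illustration (not from the original sources): for

     if (__builtin_expect (x == 0, 0)) ...

   with non-constant X, the BUILT_IN_EXPECT case above returns the
   second argument, 0, so the condition's expected value is false and
   tree_predict_by_opcode later predicts the then-branch not taken
   via PRED_BUILTIN_EXPECT.  */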
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree call;

	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && (call = GIMPLE_STMT_OPERAND (stmt, 1))
	      && TREE_CODE (call) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (call))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (call) == 2)
	    {
	      GIMPLE_STMT_OPERAND (stmt, 1) = CALL_EXPR_ARG (call, 0);
	      update_stmt (stmt);
	    }
	}
    }
}
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Try to guess whether the return value VAL means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with the return expression and look for a possible
   return value, trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (void)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (return_stmt
	  && TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == GIMPLE_MODIFY_STMT)
    return_val = GIMPLE_STMT_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say something about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, pred,
				    direction);
      }
}
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;

  apply_return_prediction ();

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
	{
	  tree stmt = bsi_stmt (bsi);
	  tree decl;
	  bool next = false;

	  switch (TREE_CODE (stmt))
	    {
	    case GIMPLE_MODIFY_STMT:
	      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = GIMPLE_STMT_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = get_callee_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	      break;
	    case PREDICT_EXPR:
	      predict_paths_leading_to (bb, PREDICT_EXPR_PREDICTOR (stmt),
					PREDICT_EXPR_OUTCOME (stmt));
	      bsi_remove (&bsi, true);
	      next = true;
	      break;
	    default:
	      break;
	    }
	  if (!next)
	    bsi_next (&bsi);
	}
    }
}
#ifdef ENABLE_CHECKING

/* Callback for pointer_map_traverse, asserts that the pointer map is
   empty.  */
static bool
assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
		 void *data ATTRIBUTE_UNUSED)
{
  gcc_assert (!*value);
  return false;
}
#endif
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;

  loop_optimizer_init (0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
  create_preheaders (CP_SIMPLE_PREHEADERS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bb_predictions = pointer_map_create ();
  tree_bb_level_predictions ();

  mark_irreducible_loops ();
  record_loop_exits ();
  if (number_of_loops () > 1)
    predict_loops ();

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.

	     Since we've already removed the return statements, we are
	     looking for CFG like:

	       if (conditional)
		 {
		   ...
		   goto return_block;
		 }
	       some other blocks
	     return_block:
	       return_stmt.  */
	  if (e->dest != bb->next_bb
	      && e->dest != EXIT_BLOCK_PTR
	      && single_succ_p (e->dest)
	      && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (e->dest)) == RETURN_EXPR)
	    {
	      edge e1;
	      edge_iterator ei1;

	      if (single_succ_p (bb))
		{
		  FOR_EACH_EDGE (e1, ei1, bb->preds)
		    if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
			&& !predicted_by_p (e1->src, PRED_CONST_RETURN)
			&& !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		      predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
		}
	      else
		if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
			   && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1))
			      == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

#ifdef ENABLE_CHECKING
  pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
#endif
  pointer_map_destroy (bb_predictions);
  bb_predictions = NULL;

  strip_builtin_expect ();
  estimate_bb_frequencies ();
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
/* Predict edges into CUR whose sources are not postdominated by BB
   as PRED, and recurse to all postdominators of CUR.  */

static void
predict_paths_for_bb (basic_block cur, basic_block bb,
		      enum br_predictor pred,
		      enum prediction taken)
{
  edge e;
  edge_iterator ei;
  basic_block son;

  /* We are looking for all edges forming the edge cut induced by the
     set of all blocks postdominated by BB.  */
  FOR_EACH_EDGE (e, ei, cur->preds)
    if (e->src->index >= NUM_FIXED_BLOCKS
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
      {
	gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
	predict_edge_def (e, pred, taken);
      }
  for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    predict_paths_for_bb (son, bb, pred, taken);
}
/* Sets branch probabilities on all paths leading to BB according to
   PRED and TAKEN.  */

static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
			  enum prediction taken)
{
  predict_paths_for_bb (bb, bb, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
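/* Illustration (not from the original sources): if a loop's back edge
   has back_edge_prob 0.9, the loop is estimated to iterate
   1 / (1 - 0.9) == 10 times per entry.  */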
#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we can not
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability)  */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
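/* Illustrative restatement (not from the original sources) of what
   propagate_freq computes: outside of cycles,

     freq (bb) = sum over forward preds e of
		   freq (e->src) * e->probability / REG_BR_PROB_BASE,

   while a loop header sums its back edges' back_edge_prob into
   cyclic_probability p and scales the incoming frequency by
   1 / (1 - p), with p capped at real_almost_one so a probability-1
   back edge cannot yield an infinite frequency.  */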
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}
/* Propagates frequencies through structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops () > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB (bb)
    {
      bitmap_set_bit (tovisit, bb->index);
    }
  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
  BITMAP_FREE (tovisit);
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static gcov_type
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
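/* Illustration (not from the original sources): with BB_FREQ_MAX ==
   10000 and a hottest block count of count_max == 1000, a block with
   count 250 gets frequency (250 * 10000 + 500) / 1000 == 2500; counts
   are rescaled so the hottest block sits at BB_FREQ_MAX.  */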
/* Return true if function is likely to be expensive, so there is no point to
   optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit on the number of instructions
   the function can execute on average to still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate basic blocks frequency by given branch probabilities.  */

void
estimate_bb_frequencies (void)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    {
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
      return;
    }
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

/* Build PREDICT_EXPR.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, NULL_TREE, build_int_cst (NULL, predictor));
  PREDICT_EXPR_OUTCOME (t) = taken;
  return t;
}

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
struct gimple_opt_pass pass_profile =
{
 {
  GIMPLE_PASS,
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};