/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "diagnostic-core.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
#include "pointer-set.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.
   PROB_VERY_UNLIKELY should be small enough so a basic block predicted
   by it gets below HOT_BB_FREQUENCY_FRACTION.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 2000 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
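
/* For illustration: REG_BR_PROB_BASE is 10000, so PROB_VERY_UNLIKELY
   evaluates to 10000 / 2000 - 1 == 4, i.e. a 0.04% branch probability,
   and PROB_VERY_LIKELY to the complementary 9996 (99.96%).  */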
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
static bool can_predict_insn_p (const_rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1
/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
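
/* For illustration: with REG_BR_PROB_BASE == 10000, HITRATE (99) is
   (99 * 10000 + 50) / 100 == 9900, i.e. a 99% hitrate in the fixed-point
   representation; the "+ 50" rounds to the nearest unit rather than
   truncating.  */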
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (int freq)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  if (!profile_info || !flag_branch_probabilities)
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR->frequency * 2 / 3))
    return false;
  if (freq < ENTRY_BLOCK_PTR->frequency / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return TRUE if count COUNT is considered to be hot.  */

static inline bool
maybe_hot_count_p (gcov_type count)
{
  if (profile_status != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count
	  > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
}
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (const_basic_block bb)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (bb->count);
  return maybe_hot_frequency_p (bb->frequency);
}
/* Return true if the call can be hot.  */

bool
cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
{
  if (profile_info && flag_branch_probabilities
      && (edge->count
	  <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
      || edge->callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return false;
  if (edge->caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
      && edge->callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE)
    return false;
  if (optimize_size)
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_HOT)
    return true;
  if (edge->caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && edge->frequency < CGRAPH_FREQ_BASE * 3 / 2)
    return false;
  if (flag_guess_branch_prob
      && edge->frequency <= (CGRAPH_FREQ_BASE
			     / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}
/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (e->count);
  return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && (cgraph_get_node (current_function_decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
/* Return true if NODE should be optimized for size.  */

bool
cgraph_optimize_for_size_p (struct cgraph_node *node)
{
  if (optimize_size)
    return true;
  if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  else
    return false;
}
/* Return true when function FUN should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (optimize_size)
    return true;
  if (!fun || !fun->decl)
    return false;
  return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
}

/* Return true when function FUN should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}
/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the current insn should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the current insn should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}
/* Return true when edge E is likely to be well predictable by branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static struct pointer_map_t *bb_predictions;

/* Structure representing predictions in tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  void **preds = pointer_map_contains (bb_predictions, bb);

  if (!preds)
    return false;

  for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which it predicts right slightly over 75% of the time.
   It is however notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This
   macro should be able to distinguish those, but at the moment it simply
   checks for the noreturn heuristic, which is the only one giving
   probabilities over 99% or below 1%.  In the future we might want to
   propagate reliability information across the CFG if we find this
   information useful in multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      void **preds = pointer_map_insert (bb_predictions, e->src);

      i->ep_next = (struct edge_prediction *) *preds;
      *preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  void **preds;

  if (!bb_predictions)
    return;

  preds = pointer_map_contains (bb_predictions, e->src);

  if (preds)
    {
      struct edge_prediction **prediction = (struct edge_prediction **) preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  void **preds = pointer_map_contains (bb_predictions, bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = (struct edge_prediction *) *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_reg_note (insn, REG_BR_PROB, GEN_INT (combined_probability));

      /* Save the prediction into CFG in case we are dealing with a
	 non-degenerate conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
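
/* A worked instance of the Dempster-Shafer combination loop above
   (an illustrative sketch, assuming REG_BR_PROB_BASE == 10000 and two
   REG_BR_PRED notes that both predict "taken", with probabilities 7000
   and 6000):

     d        = 7000 * 6000 + (10000 - 7000) * (10000 - 6000)
	      = 42000000 + 12000000 = 54000000
     combined = 7000 * 6000 * 10000 / d + 0.5 ~= 7778

   Two agreeing predictors thus reinforce each other beyond either one
   alone, while a predictor at exactly 5000 (50%) leaves the combined
   probability unchanged.  */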
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  void **preds;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges ++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  preds = pointer_map_contains (bb_predictions, bb);
  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds; pred2;
		   pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred2->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it!  */
		    if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != EDGE_SUCC (bb, 0))
	    probability = REG_BR_PROB_BASE - probability;
	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	}
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition satisfies, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (host_integerp (t1, 0))
    value = tree_low_cst (t1, 0);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (host_integerp (t2, 0))
    value = tree_low_cst (t2, 0);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}
/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}
/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     int *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  int step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (host_integerp (iv1.step, 0))
	step = tree_low_cst (iv1.step, 0);
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (host_integerp (iv0.step, 0))
	step = tree_low_cst (iv0.step, 0);
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}
/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}
/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

  In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (struct loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  int compare_step;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
      || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
      || predicted_by_p (bb, PRED_LOOP_EXIT))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
					    &compare_code,
					    &compare_step,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (host_integerp (loop_bound_var, 0)
      && host_integerp (compare_var, 0)
      && host_integerp (compare_base, 0))
    {
      int probability;
      HOST_WIDE_INT compare_count;
      HOST_WIDE_INT loop_bound = tree_low_cst (loop_bound_var, 0);
      HOST_WIDE_INT compare_bound = tree_low_cst (compare_var, 0);
      HOST_WIDE_INT base = tree_low_cst (compare_base, 0);
      HOST_WIDE_INT loop_count = (loop_bound - base) / compare_step;

      if ((compare_step > 0)
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	compare_count = (loop_bound - compare_bound) / compare_step;
      else
	compare_count = (compare_bound - base) / compare_step;

      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	compare_count ++;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	loop_count ++;
      if (compare_count < 0)
	compare_count = 0;
      if (loop_count < 0)
	loop_count = 0;

      if (loop_count == 0)
	probability = 0;
      else if (compare_count > loop_count)
	probability = REG_BR_PROB_BASE;
      else
	probability = (double) REG_BR_PROB_BASE * compare_count / loop_count;
      predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
      return;
    }

  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	       * step < 0 : backedge condition is like (i > bound)
	       * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   the opposite of compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}
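
/* A worked example of the constant-bound case above (illustrative only):
   for "for (i = 0; i < 100; i++) if (i < 30) ...", loop_bound is 100,
   compare_bound is 30, base is 0 and compare_step is 1, so
   loop_count == 100, compare_count == 30 and the then_edge gets
   probability 30 * REG_BR_PROB_BASE / 100, i.e. 30%.  */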
/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;
      struct nb_iter_bound *nb_iter;
      enum tree_code loop_bound_code = ERROR_MARK;
      int loop_bound_step = 0;
      tree loop_bound_var = NULL;
      tree loop_iv_base = NULL;
      gimple stmt = NULL;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      FOR_EACH_VEC_ELT (edge, exits, j, ex)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (host_integerp (niter, 1)
		  && compare_tree_int (niter, max-1) == -1)
		nitercst = tree_low_cst (niter, 1) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_stmt_executions_int (loop);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
	}
      VEC_free (edge, heap, exits);

      /* Find information about loop bound variables.  */
      for (nb_iter = loop->bounds; nb_iter;
	   nb_iter = nb_iter->next)
	if (nb_iter->stmt
	    && gimple_code (nb_iter->stmt) == GIMPLE_COND)
	  {
	    stmt = nb_iter->stmt;
	    break;
	  }
      if (!stmt && last_stmt (loop->header)
	  && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
	stmt = last_stmt (loop->header);
      if (stmt)
	is_comparison_with_loop_invariant_p (stmt, loop,
					     &loop_bound_var,
					     &loop_bound_code,
					     &loop_bound_step,
					     &loop_iv_base);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For loops with many exits we don't want to predict all exits
		 with a pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by number of
		 exits is very rough.  It should compute the number of exits
		 taken in each path through the function (not the overall
		 number of exits that might be a lot higher for loops with
		 wide switch statements in them) and compute the n-th square
		 root.

		 We limit the minimal probability by 2% to avoid
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
		 as this was causing regression in the perl benchmark
		 containing such a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	  if (loop_bound_var)
	    predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
				   loop_bound_code,
				   loop_bound_step);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
static tree expr_expected_value (tree, bitmap);

/* Helper function for expr_expected_value.  */

static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
		       tree op1, bitmap visited)
{
  gimple def;

  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CONSTANT (op0))
	return op0;

      if (code != SSA_NAME)
	return NULL_TREE;

      def = SSA_NAME_DEF_STMT (op0);

      /* If we were already here, break the infinite cycle.  */
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
	return NULL;

      if (gimple_code (def) == GIMPLE_PHI)
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  int i, n = gimple_phi_num_args (def);
	  tree val = NULL, new_val;

	  for (i = 0; i < n; i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (is_gimple_assign (def))
	{
	  if (gimple_assign_lhs (def) != op0)
	    return NULL;

	  return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
					gimple_assign_rhs1 (def),
					gimple_assign_rhs_code (def),
					gimple_assign_rhs2 (def),
					visited);
	}

      if (is_gimple_call (def))
	{
	  tree decl = gimple_call_fndecl (def);
	  if (!decl)
	    return NULL;
	  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_EXPECT:
		{
		  tree val;
		  if (gimple_call_num_args (def) != 2)
		    return NULL;
		  val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  return gimple_call_arg (def, 1);
		}

	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
		/* Assume that any given atomic operation has low contention,
		   and thus the compare-and-swap operation succeeds.  */
		return boolean_true_node;

	      default:
		break;
	      }
	  return NULL;
	}

      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (op1, visited);
      if (!op1)
	return NULL;
      res = fold_build2 (code, type, op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
	return NULL;
      res = fold_build1 (code, type, op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
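
/* For illustration, on GIMPLE such as the following sketch (assuming the
   usual lowering of __builtin_expect into a call with an SSA result):

     t_1 = __builtin_expect (x_2, 1);
     if (t_1 != 0) ...

   expr_expected_value_1 follows the SSA definition of t_1 to the
   BUILT_IN_EXPECT call and returns its second argument, the constant 1,
   which tree_predict_by_opcode below then turns into a
   PRED_BUILTIN_EXPECT/TAKEN prediction on the true edge.  */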
/* Return the constant EXPR will likely have at execution time, NULL if
   unknown.  The function is used by the builtin_expect branch predictor, so
   the evidence must come from this construct and additional possible constant
   folding.

   We may want to implement a more involved value guess (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    return expr;

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
				op0, code, op1, visited);
}
/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */
static unsigned int
strip_predict_hints (void)
{
  basic_block bb;
  gimple ass_stmt;
  tree var;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
	{
	  gimple stmt = gsi_stmt (bi);

	  if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      gsi_remove (&bi, true);
	      continue;
	    }
	  else if (gimple_code (stmt) == GIMPLE_CALL)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      if (fndecl
		  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
		  && gimple_call_num_args (stmt) == 2)
		{
		  var = gimple_call_lhs (stmt);
		  if (var)
		    {
		      ass_stmt
			= gimple_build_assign (var, gimple_call_arg (stmt, 0));
		      gsi_replace (&bi, ass_stmt, true);
		    }
		  else
		    {
		      gsi_remove (&bi, true);
		      continue;
		    }
		}
	    }
	  gsi_next (&bi);
	}
    }
  return 0;
}
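
/* For illustration (a hypothetical before/after, not from a real dump):
   strip_predict_hints rewrites

     x_3 = __builtin_expect (y_1, 1);

   into the plain copy "x_3 = y_1;" and deletes standalone GIMPLE_PREDICT
   statements, since by this point the hints have already been consumed
   by the branch prediction code above.  */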
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  edge then_edge;
  tree op0, op1;
  tree type;
  tree val;
  enum tree_code cmp;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  cmp = gimple_cond_code (stmt);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (cmp == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (cmp == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (cmp)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0) || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_zerop (op1)
	    || real_onep (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_zerop (op1)
	    || real_onep (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Try to guess whether the return value means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
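
/* For illustration: in a pointer-valued function "return NULL;" yields
   PRED_NULL_RETURN with NOT_TAKEN, in an integer function "return -1;"
   yields PRED_NEGATIVE_RETURN with NOT_TAKEN and "return 42;" yields
   PRED_CONST_RETURN with TAKEN, while "return 0;" is left unpredicted
   because zero and one often represent booleans.  */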
/* Find the basic block with the return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (void)
{
  gimple return_stmt = NULL;
  tree return_val;
  edge e;
  gimple phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (return_stmt
	  && gimple_code (return_stmt) == GIMPLE_RETURN)
	break;
    }
  if (!e)
    return;
  return_val = gimple_return_retval (return_stmt);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  phi_num_args = gimple_phi_num_args (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
					 direction);
      }
}
1982 /* Look for basic block that contains unlikely to happen events
1983 (such as noreturn calls) and mark all paths leading to execution
1984 of this basic blocks as unlikely. */
1986 static void
1987 tree_bb_level_predictions (void)
1989 basic_block bb;
1990 bool has_return_edges = false;
1991 edge e;
1992 edge_iterator ei;
1994 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1995 if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
1997 has_return_edges = true;
1998 break;
2001 apply_return_prediction ();
2003 FOR_EACH_BB (bb)
2005 gimple_stmt_iterator gsi;
2007 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2009 gimple stmt = gsi_stmt (gsi);
2010 tree decl;
2012 if (is_gimple_call (stmt))
2014 if ((gimple_call_flags (stmt) & ECF_NORETURN)
2015 && has_return_edges)
2016 predict_paths_leading_to (bb, PRED_NORETURN,
2017 NOT_TAKEN);
2018 decl = gimple_call_fndecl (stmt);
2019 if (decl
2020 && lookup_attribute ("cold",
2021 DECL_ATTRIBUTES (decl)))
2022 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2023 NOT_TAKEN);
2025 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2027 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2028 gimple_predict_outcome (stmt));
2029 /* Keep GIMPLE_PREDICT around so early inlining will propagate
2030 hints to callers. */
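/* For example (user code; FATAL_ERROR and LOG_RARE are hypothetical):

     if (fd < 0)
       fatal_error ("open failed");   <- noreturn call: paths leading here
                                         are predicted not taken
                                         (PRED_NORETURN), provided the
                                         function has a normal return edge

     if (rare)
       log_rare (msg);                <- if log_rare is declared with
                                         __attribute__((cold)), paths here
                                         are also predicted not taken
                                         (PRED_COLD_FUNCTION)  */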
2036 #ifdef ENABLE_CHECKING
2038 /* Callback for pointer_map_traverse, asserts that the pointer map is
2039 empty. */
2041 static bool
2042 assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
2043 void *data ATTRIBUTE_UNUSED)
2045 gcc_assert (!*value);
2046 return false;
2048 #endif
2050 /* Predict branch probabilities and estimate profile for basic block BB. */
2052 static void
2053 tree_estimate_probability_bb (basic_block bb)
2055 edge e;
2056 edge_iterator ei;
2057 gimple last;
2059 FOR_EACH_EDGE (e, ei, bb->succs)
2061 /* Predict early returns to be probable, as we've already taken
2062 care of error returns; the remaining cases are often fast
2063 paths through the function.
2065 Since we've already removed the return statements, we are
2066 looking for a CFG like:
2068 if (conditional)
2071 goto return_block
2073 some other blocks
2074 return_block:
2075 return_stmt. */
2076 if (e->dest != bb->next_bb
2077 && e->dest != EXIT_BLOCK_PTR
2078 && single_succ_p (e->dest)
2079 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
2080 && (last = last_stmt (e->dest)) != NULL
2081 && gimple_code (last) == GIMPLE_RETURN)
2083 edge e1;
2084 edge_iterator ei1;
2086 if (single_succ_p (bb))
2088 FOR_EACH_EDGE (e1, ei1, bb->preds)
2089 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
2090 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
2091 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
2092 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2094 else
2095 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
2096 && !predicted_by_p (e->src, PRED_CONST_RETURN)
2097 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
2098 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
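/* Source-level illustration of the shape being matched (hypothetical
   function names):

     int f (int x)
     {
       if (x == 0)
         return 1;        <- early return; the edge into its block is
                             predicted not taken (PRED_TREE_EARLY_RETURN)
       ...main work...
       return g (x);
     }
*/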
2101 /* Look for the block we are guarding (i.e. we dominate it,
2102 but it doesn't postdominate us). */
2103 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
2104 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
2105 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
2107 gimple_stmt_iterator bi;
2109 /* The call heuristic claims that a guarded function call
2110 is improbable. This is because such calls are often used
2111 to signal exceptional situations such as printing error
2112 messages. */
2113 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
2114 gsi_next (&bi))
2116 gimple stmt = gsi_stmt (bi);
2117 if (is_gimple_call (stmt)
2118 /* Constant and pure calls are hardly used to signal
2119 something exceptional. */
2120 && gimple_has_side_effects (stmt))
2122 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
2123 break;
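/* E.g. (illustrative; REPORT is hypothetical):

     if (err != 0)
       report (err);    <- guarded call with side effects: the edge into
                           the guarded block is predicted not taken
                           (PRED_CALL); a const or pure call here would
                           be ignored.  */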
2128 tree_predict_by_opcode (bb);
2131 /* Predict branch probabilities and estimate profile of the tree CFG.
2132 This function can be called from the loop optimizers to recompute
2133 the profile information. */
2135 void
2136 tree_estimate_probability (void)
2138 basic_block bb;
2140 add_noreturn_fake_exit_edges ();
2141 connect_infinite_loops_to_exit ();
2142 /* We use loop_niter_by_eval, which requires that the loops have
2143 preheaders. */
2144 create_preheaders (CP_SIMPLE_PREHEADERS);
2145 calculate_dominance_info (CDI_POST_DOMINATORS);
2147 bb_predictions = pointer_map_create ();
2148 tree_bb_level_predictions ();
2149 record_loop_exits ();
2151 if (number_of_loops () > 1)
2152 predict_loops ();
2154 FOR_EACH_BB (bb)
2155 tree_estimate_probability_bb (bb);
2157 FOR_EACH_BB (bb)
2158 combine_predictions_for_bb (bb);
2160 #ifdef ENABLE_CHECKING
2161 pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
2162 #endif
2163 pointer_map_destroy (bb_predictions);
2164 bb_predictions = NULL;
2166 estimate_bb_frequencies ();
2167 free_dominance_info (CDI_POST_DOMINATORS);
2168 remove_fake_exit_edges ();
2171 /* Predict branch probabilities and estimate profile of the tree CFG.
2172 This is the driver function for PASS_PROFILE. */
2174 static unsigned int
2175 tree_estimate_probability_driver (void)
2177 unsigned nb_loops;
2179 loop_optimizer_init (0);
2180 if (dump_file && (dump_flags & TDF_DETAILS))
2181 flow_loops_dump (dump_file, NULL, 0);
2183 mark_irreducible_loops ();
2185 nb_loops = number_of_loops ();
2186 if (nb_loops > 1)
2187 scev_initialize ();
2189 tree_estimate_probability ();
2191 if (nb_loops > 1)
2192 scev_finalize ();
2194 loop_optimizer_finalize ();
2195 if (dump_file && (dump_flags & TDF_DETAILS))
2196 gimple_dump_cfg (dump_file, dump_flags);
2197 if (profile_status == PROFILE_ABSENT)
2198 profile_status = PROFILE_GUESSED;
2199 return 0;
2202 /* Predict edges to successors of CUR whose sources are not postdominated
2203 by BB, using predictor PRED; recurse to all postdominators. */
2205 static void
2206 predict_paths_for_bb (basic_block cur, basic_block bb,
2207 enum br_predictor pred,
2208 enum prediction taken,
2209 bitmap visited)
2211 edge e;
2212 edge_iterator ei;
2213 basic_block son;
2215 /* We are looking for all edges forming an edge cut induced by the
2216 set of all blocks postdominated by BB. */
2217 FOR_EACH_EDGE (e, ei, cur->preds)
2218 if (e->src->index >= NUM_FIXED_BLOCKS
2219 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
2221 edge e2;
2222 edge_iterator ei2;
2223 bool found = false;
2225 /* Ignore fake edges and eh, we predict them as not taken anyway. */
2226 if (e->flags & (EDGE_EH | EDGE_FAKE))
2227 continue;
2228 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
2230 /* See if there is an edge from e->src that is not abnormal
2231 and does not lead to BB. */
2232 FOR_EACH_EDGE (e2, ei2, e->src->succs)
2233 if (e2 != e
2234 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
2235 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
2237 found = true;
2238 break;
2241 /* If there is a non-abnormal path leaving e->src, predict the edge
2242 using the predictor. Otherwise we need to look for paths
2243 leading to e->src.
2245 The second case may lead to an infinite loop when we are predicting
2246 regions that are only reachable by abnormal edges. We simply
2247 prevent visiting a given BB twice. */
2248 if (found)
2249 predict_edge_def (e, pred, taken);
2250 else if (bitmap_set_bit (visited, e->src->index))
2251 predict_paths_for_bb (e->src, e->src, pred, taken, visited);
2253 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
2254 son;
2255 son = next_dom_son (CDI_POST_DOMINATORS, son))
2256 predict_paths_for_bb (son, bb, pred, taken, visited);
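/* Schematic example: suppose BB postdominates B but not A or D:

       A ---> B ---> ... ---> BB
        \
         \--> D

   The edge A->B crosses the cut into the region postdominated by BB, and
   since A has another ordinary exit (A->D), A->B itself receives the
   prediction.  Only when every other exit of a predecessor is an EH or
   fake edge do we recurse further up, with VISITED guarding against
   cycles.  */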
2259 /* Set the branch probabilities for all paths leading to BB according
2260 to PRED and TAKEN. */
2262 static void
2263 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
2264 enum prediction taken)
2266 bitmap visited = BITMAP_ALLOC (NULL);
2267 predict_paths_for_bb (bb, bb, pred, taken, visited);
2268 BITMAP_FREE (visited);
2271 /* Like predict_paths_leading_to but takes an edge instead of a basic block. */
2273 static void
2274 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
2275 enum prediction taken)
2277 bool has_nonloop_edge = false;
2278 edge_iterator ei;
2279 edge e2;
2281 basic_block bb = e->src;
2282 FOR_EACH_EDGE (e2, ei, bb->succs)
2283 if (e2->dest != e->src && e2->dest != e->dest
2284 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
2285 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
2287 has_nonloop_edge = true;
2288 break;
2290 if (!has_nonloop_edge)
2292 bitmap visited = BITMAP_ALLOC (NULL);
2293 predict_paths_for_bb (bb, bb, pred, taken, visited);
2294 BITMAP_FREE (visited);
2296 else
2297 predict_edge_def (e, pred, taken);
2300 /* This is used to carry information about basic blocks. It is
2301 attached to the AUX field of the standard CFG block. */
2303 typedef struct block_info_def
2305 /* Estimated frequency of execution of basic_block. */
2306 sreal frequency;
2309 /* To keep the queue of basic blocks to process. */
2309 basic_block next;
2311 /* Number of predecessors we need to visit first. */
2312 int npredecessors;
2313 } *block_info;
2315 /* Similar information for edges. */
2316 typedef struct edge_info_def
2318 /* In case the edge is a loopback edge, the probability that the edge will
2319 be reached provided that the header is. The estimated number of
2320 iterations of the loop can then be computed as 1 / (1 - back_edge_prob). */
2321 sreal back_edge_prob;
2322 /* True if the edge is a loopback edge in the natural loop. */
2323 unsigned int back_edge:1;
2324 } *edge_info;
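/* For instance, a latch edge with back_edge_prob of 0.9 corresponds to an
   estimated 1 / (1 - 0.9) = 10 iterations, so the loop body ends up with
   roughly ten times the frequency of the code before the loop.  */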
2326 #define BLOCK_INFO(B) ((block_info) (B)->aux)
2327 #define EDGE_INFO(E) ((edge_info) (E)->aux)
2329 /* Helper function for estimate_bb_frequencies.
2330 Propagate the frequencies in blocks marked in
2331 TOVISIT, starting in HEAD. */
2333 static void
2334 propagate_freq (basic_block head, bitmap tovisit)
2336 basic_block bb;
2337 basic_block last;
2338 unsigned i;
2339 edge e;
2340 basic_block nextbb;
2341 bitmap_iterator bi;
2343 /* For each basic block we need to visit, count the number of its
2344 predecessors that we need to visit first. */
2345 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
2347 edge_iterator ei;
2348 int count = 0;
2350 bb = BASIC_BLOCK (i);
2352 FOR_EACH_EDGE (e, ei, bb->preds)
2354 bool visit = bitmap_bit_p (tovisit, e->src->index);
2356 if (visit && !(e->flags & EDGE_DFS_BACK))
2357 count++;
2358 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
2359 fprintf (dump_file,
2360 "Irreducible region hit, ignoring edge to %i->%i\n",
2361 e->src->index, bb->index);
2363 BLOCK_INFO (bb)->npredecessors = count;
2364 /* When function never returns, we will never process exit block. */
2365 if (!count && bb == EXIT_BLOCK_PTR)
2366 bb->count = bb->frequency = 0;
2369 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
2370 last = head;
2371 for (bb = head; bb; bb = nextbb)
2373 edge_iterator ei;
2374 sreal cyclic_probability, frequency;
2376 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
2377 memcpy (&frequency, &real_zero, sizeof (real_zero));
2379 nextbb = BLOCK_INFO (bb)->next;
2380 BLOCK_INFO (bb)->next = NULL;
2382 /* Compute frequency of basic block. */
2383 if (bb != head)
2385 #ifdef ENABLE_CHECKING
2386 FOR_EACH_EDGE (e, ei, bb->preds)
2387 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
2388 || (e->flags & EDGE_DFS_BACK));
2389 #endif
2391 FOR_EACH_EDGE (e, ei, bb->preds)
2392 if (EDGE_INFO (e)->back_edge)
2394 sreal_add (&cyclic_probability, &cyclic_probability,
2395 &EDGE_INFO (e)->back_edge_prob);
2397 else if (!(e->flags & EDGE_DFS_BACK))
2399 sreal tmp;
2401 /* frequency += (e->probability
2402 * BLOCK_INFO (e->src)->frequency /
2403 REG_BR_PROB_BASE); */
2405 sreal_init (&tmp, e->probability, 0);
2406 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
2407 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
2408 sreal_add (&frequency, &frequency, &tmp);
2411 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
2413 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
2414 sizeof (frequency));
2416 else
2418 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
2420 memcpy (&cyclic_probability, &real_almost_one,
2421 sizeof (real_almost_one));
2424 /* BLOCK_INFO (bb)->frequency = frequency
2425 / (1 - cyclic_probability) */
2427 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
2428 sreal_div (&BLOCK_INFO (bb)->frequency,
2429 &frequency, &cyclic_probability);
2433 bitmap_clear_bit (tovisit, bb->index);
2435 e = find_edge (bb, head);
2436 if (e)
2438 sreal tmp;
2440 /* EDGE_INFO (e)->back_edge_prob
2441 = ((e->probability * BLOCK_INFO (bb)->frequency)
2442 / REG_BR_PROB_BASE); */
2444 sreal_init (&tmp, e->probability, 0);
2445 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
2446 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2447 &tmp, &real_inv_br_prob_base);
2450 /* Propagate to successor blocks. */
2451 FOR_EACH_EDGE (e, ei, bb->succs)
2452 if (!(e->flags & EDGE_DFS_BACK)
2453 && BLOCK_INFO (e->dest)->npredecessors)
2455 BLOCK_INFO (e->dest)->npredecessors--;
2456 if (!BLOCK_INFO (e->dest)->npredecessors)
2458 if (!nextbb)
2459 nextbb = e->dest;
2460 else
2461 BLOCK_INFO (last)->next = e->dest;
2463 last = e->dest;
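/* Worked example (illustrative): for a simple diamond

        head  (frequency 1.0)
        /  \
      60%  40%
      /      \
    then     else
      \      /
       join

   THEN and ELSE receive frequencies 0.6 and 0.4, and JOIN receives
   1.0 * 0.6 + 1.0 * 0.4 = 1.0 again, since each arm reaches it with
   probability 1.  If HEAD were a loop header with cyclic_probability p,
   its frequency would further be divided by (1 - p).  */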
2469 /* Estimate probabilities of loopback edges in loops at the same nest level. */
2471 static void
2472 estimate_loops_at_level (struct loop *first_loop)
2474 struct loop *loop;
2476 for (loop = first_loop; loop; loop = loop->next)
2478 edge e;
2479 basic_block *bbs;
2480 unsigned i;
2481 bitmap tovisit = BITMAP_ALLOC (NULL);
2483 estimate_loops_at_level (loop->inner);
2485 /* Find current loop back edge and mark it. */
2486 e = loop_latch_edge (loop);
2487 EDGE_INFO (e)->back_edge = 1;
2489 bbs = get_loop_body (loop);
2490 for (i = 0; i < loop->num_nodes; i++)
2491 bitmap_set_bit (tovisit, bbs[i]->index);
2492 free (bbs);
2493 propagate_freq (loop->header, tovisit);
2494 BITMAP_FREE (tovisit);
2498 /* Propagate frequencies through the structure of loops. */
2500 static void
2501 estimate_loops (void)
2503 bitmap tovisit = BITMAP_ALLOC (NULL);
2504 basic_block bb;
2506 /* Start by estimating the frequencies in the loops. */
2507 if (number_of_loops () > 1)
2508 estimate_loops_at_level (current_loops->tree_root->inner);
2510 /* Now propagate the frequencies through all the blocks. */
2511 FOR_ALL_BB (bb)
2513 bitmap_set_bit (tovisit, bb->index);
2515 propagate_freq (ENTRY_BLOCK_PTR, tovisit);
2516 BITMAP_FREE (tovisit);
2519 /* Convert counts measured by profile driven feedback to frequencies.
2520 Return nonzero iff there was any nonzero execution count. */
2522 bool
2523 counts_to_freqs (void)
2525 gcov_type count_max, true_count_max = 0;
2526 basic_block bb;
2528 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2529 true_count_max = MAX (bb->count, true_count_max);
2531 count_max = MAX (true_count_max, 1);
2532 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2533 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
2535 return true_count_max;
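/* E.g. assuming BB_FREQ_MAX is 10000: a block executed 250 times when the
   hottest block ran 1000 times gets frequency
   (250 * 10000 + 500) / 1000 == 2500; the COUNT_MAX / 2 term rounds to
   nearest instead of truncating.  */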
2538 /* Return true if the function is likely to be expensive, so there is no
2539 point in optimizing the performance of the prologue/epilogue or doing
2540 inlining at the expense of code size growth. THRESHOLD is the limit on
2541 the number of instructions the function can execute on average to still be considered not expensive. */
2543 bool
2544 expensive_function_p (int threshold)
2546 unsigned int sum = 0;
2547 basic_block bb;
2548 unsigned int limit;
2550 /* We cannot compute this accurately for large thresholds due to the
2551 scaled frequencies. */
2552 gcc_assert (threshold <= BB_FREQ_MAX);
2554 /* Frequencies are out of range. This either means that the function
2555 contains an internal loop executing more than BB_FREQ_MAX times, or that
2556 profile feedback is available and the function has not been executed at all. */
2557 if (ENTRY_BLOCK_PTR->frequency == 0)
2558 return true;
2560 /* At most BB_FREQ_MAX^2, so overflow won't happen. */
2561 limit = ENTRY_BLOCK_PTR->frequency * threshold;
2562 FOR_EACH_BB (bb)
2564 rtx insn;
2566 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
2567 insn = NEXT_INSN (insn))
2568 if (active_insn_p (insn))
2570 sum += bb->frequency;
2571 if (sum > limit)
2572 return true;
2576 return false;
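/* E.g. if the entry block frequency is 1000 and THRESHOLD is 20, LIMIT is
   20000; once the per-insn frequency sum exceeds that, the function is
   expected to execute more than 20 insns per invocation on average and is
   reported as expensive.  */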
2579 /* Estimate basic block frequencies from the given branch probabilities. */
2581 void
2582 estimate_bb_frequencies (void)
2584 basic_block bb;
2585 sreal freq_max;
2587 if (profile_status != PROFILE_READ || !counts_to_freqs ())
2589 static int real_values_initialized = 0;
2591 if (!real_values_initialized)
2593 real_values_initialized = 1;
2594 sreal_init (&real_zero, 0, 0);
2595 sreal_init (&real_one, 1, 0);
2596 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
2597 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
2598 sreal_init (&real_one_half, 1, -1);
2599 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
2600 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
2603 mark_dfs_back_edges ();
2605 single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
2607 /* Set up block info for each basic block. */
2608 alloc_aux_for_blocks (sizeof (struct block_info_def));
2609 alloc_aux_for_edges (sizeof (struct edge_info_def));
2610 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2612 edge e;
2613 edge_iterator ei;
2615 FOR_EACH_EDGE (e, ei, bb->succs)
2617 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
2618 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2619 &EDGE_INFO (e)->back_edge_prob,
2620 &real_inv_br_prob_base);
2624 /* First compute probabilities locally for each loop from innermost
2625 to outermost to examine probabilities for back edges. */
2626 estimate_loops ();
2628 memcpy (&freq_max, &real_zero, sizeof (real_zero));
2629 FOR_EACH_BB (bb)
2630 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
2631 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
2633 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
2634 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2636 sreal tmp;
2638 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
2639 sreal_add (&tmp, &tmp, &real_one_half);
2640 bb->frequency = sreal_to_int (&tmp);
2643 free_aux_for_blocks ();
2644 free_aux_for_edges ();
2646 compute_function_frequency ();
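/* The final loop above rescales the propagated sreal frequencies so that
   the hottest block hits BB_FREQ_MAX: e.g. if the maximum propagated
   value is 12.5 and BB_FREQ_MAX is 10000, each frequency is multiplied
   by 10000 / 12.5 = 800 and rounded via the added one half.  */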
2649 /* Decide whether the function is hot, cold or unlikely executed. */
2650 void
2651 compute_function_frequency (void)
2653 basic_block bb;
2654 struct cgraph_node *node = cgraph_get_node (current_function_decl);
2655 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2656 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
2657 node->only_called_at_startup = true;
2658 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
2659 node->only_called_at_exit = true;
2661 if (!profile_info || !flag_branch_probabilities)
2663 int flags = flags_from_decl_or_type (current_function_decl);
2664 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
2665 != NULL)
2666 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2667 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
2668 != NULL)
2669 node->frequency = NODE_FREQUENCY_HOT;
2670 else if (flags & ECF_NORETURN)
2671 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2672 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
2673 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2674 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2675 || DECL_STATIC_DESTRUCTOR (current_function_decl))
2676 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2677 return;
2679 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2680 FOR_EACH_BB (bb)
2682 if (maybe_hot_bb_p (bb))
2684 node->frequency = NODE_FREQUENCY_HOT;
2685 return;
2687 if (!probably_never_executed_bb_p (bb))
2688 node->frequency = NODE_FREQUENCY_NORMAL;
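/* E.g. (illustrative declarations of functions being compiled):

     __attribute__((cold)) void die (const char *);  -> UNLIKELY_EXECUTED
     __attribute__((hot)) void spin (void);          -> HOT
     int main (void);                                -> EXECUTED_ONCE

   With real profile feedback the attributes are not consulted; the
   classification instead comes from maybe_hot_bb_p and
   probably_never_executed_bb_p over the block counts.  */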
2692 static bool
2693 gate_estimate_probability (void)
2695 return flag_guess_branch_prob;
2698 /* Build PREDICT_EXPR. */
2699 tree
2700 build_predict_expr (enum br_predictor predictor, enum prediction taken)
2702 tree t = build1 (PREDICT_EXPR, void_type_node,
2703 build_int_cst (integer_type_node, predictor));
2704 SET_PREDICT_EXPR_OUTCOME (t, taken);
2705 return t;
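/* E.g. build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN) builds the kind of
   PREDICT_EXPR used for a __builtin_expect hint that the condition is
   true.  */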
2708 const char *
2709 predictor_name (enum br_predictor predictor)
2711 return predictor_info[predictor].name;
2714 struct gimple_opt_pass pass_profile =
2717 GIMPLE_PASS,
2718 "profile_estimate", /* name */
2719 gate_estimate_probability, /* gate */
2720 tree_estimate_probability_driver, /* execute */
2721 NULL, /* sub */
2722 NULL, /* next */
2723 0, /* static_pass_number */
2724 TV_BRANCH_PROB, /* tv_id */
2725 PROP_cfg, /* properties_required */
2726 0, /* properties_provided */
2727 0, /* properties_destroyed */
2728 0, /* todo_flags_start */
2729 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2733 struct gimple_opt_pass pass_strip_predict_hints =
2736 GIMPLE_PASS,
2737 "*strip_predict_hints", /* name */
2738 NULL, /* gate */
2739 strip_predict_hints, /* execute */
2740 NULL, /* sub */
2741 NULL, /* next */
2742 0, /* static_pass_number */
2743 TV_BRANCH_PROB, /* tv_id */
2744 PROP_cfg, /* properties_required */
2745 0, /* properties_provided */
2746 0, /* properties_destroyed */
2747 0, /* todo_flags_start */
2748 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2752 /* Rebuild function frequencies. Passes are in general expected to
2753 maintain the profile by hand; however, in some cases this is not
2754 possible: for example, when inlining several functions with loops,
2755 frequencies might run out of scale and thus need to be recomputed. */
2757 void
2758 rebuild_frequencies (void)
2760 timevar_push (TV_REBUILD_FREQUENCIES);
2761 if (profile_status == PROFILE_GUESSED)
2763 loop_optimizer_init (0);
2764 add_noreturn_fake_exit_edges ();
2765 mark_irreducible_loops ();
2766 connect_infinite_loops_to_exit ();
2767 estimate_bb_frequencies ();
2768 remove_fake_exit_edges ();
2769 loop_optimizer_finalize ();
2771 else if (profile_status == PROFILE_READ)
2772 counts_to_freqs ();
2773 else
2774 gcc_unreachable ();
2775 timevar_pop (TV_REBUILD_FREQUENCIES);