gcc/predict.c
1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* References:
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "tree.h"
36 #include "rtl.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "function.h"
44 #include "except.h"
45 #include "diagnostic-core.h"
46 #include "recog.h"
47 #include "expr.h"
48 #include "predict.h"
49 #include "coverage.h"
50 #include "sreal.h"
51 #include "params.h"
52 #include "target.h"
53 #include "cfgloop.h"
54 #include "tree-flow.h"
55 #include "ggc.h"
56 #include "tree-pass.h"
57 #include "tree-scalar-evolution.h"
59 #include "pointer-set.h"
61 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
62 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
63 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
64 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
 66 /* Random guesstimates given names.
 67 PROB_VERY_UNLIKELY should be small enough that a basic block predicted
 68 by it falls below the HOT_BB_FREQUENCY_FRACTION threshold. */
69 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
70 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
71 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
72 #define PROB_ALWAYS (REG_BR_PROB_BASE)
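/* As an illustration (assuming the customary REG_BR_PROB_BASE of 10000), these
   work out to PROB_VERY_UNLIKELY == 4, PROB_EVEN == 5000,
   PROB_VERY_LIKELY == 9996 and PROB_ALWAYS == 10000.  */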
74 static void combine_predictions_for_insn (rtx, basic_block);
75 static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
76 static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
77 static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
78 static bool can_predict_insn_p (const_rtx);
80 /* Information we hold about each branch predictor.
81 Filled using information from predict.def. */
83 struct predictor_info
85 const char *const name; /* Name used in the debugging dumps. */
86 const int hitrate; /* Expected hitrate used by
87 predict_insn_def call. */
88 const int flags;
 91 /* Use the given predictor without Dempster-Shafer theory if it matches
 92 using the first_match heuristic. */
93 #define PRED_FLAG_FIRST_MATCH 1
 95 /* Convert a hitrate given in percent to our REG_BR_PROB_BASE based representation. */
97 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
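/* For example, with REG_BR_PROB_BASE of 10000, HITRATE (50) is 5000 and
   HITRATE (99) is 9900; the "+ 50" rounds before the integer division
   by 100.  */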
99 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
100 static const struct predictor_info predictor_info[]= {
101 #include "predict.def"
103 /* Upper bound on predictors. */
104 {NULL, 0, 0}
106 #undef DEF_PREDICTOR
108 /* Return TRUE if frequency FREQ is considered to be hot. */
110 static inline bool
111 maybe_hot_frequency_p (struct function *fun, int freq)
113 struct cgraph_node *node = cgraph_get_node (fun->decl);
114 if (!profile_info || !flag_branch_probabilities)
116 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
117 return false;
118 if (node->frequency == NODE_FREQUENCY_HOT)
119 return true;
121 if (profile_status_for_function (fun) == PROFILE_ABSENT)
122 return true;
123 if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
124 && freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency * 2 / 3))
125 return false;
126 if (freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency
127 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
128 return false;
129 return true;
 132 /* Return TRUE if profile count COUNT is considered to be hot. */
134 static inline bool
135 maybe_hot_count_p (struct function *fun, gcov_type count)
137 if (profile_status_for_function (fun) != PROFILE_READ)
138 return true;
139 /* Code executed at most once is not hot. */
140 if (profile_info->runs >= count)
141 return false;
142 return (count
143 > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
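/* For instance, if the profile recorded a sum_max of 1000000 and
   HOT_BB_COUNT_FRACTION is 10000 (an illustrative value, not necessarily the
   build's default), only counts above 100 (and above the number of training
   runs) would be considered hot here.  */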
146 /* Return true in case BB can be CPU intensive and should be optimized
147 for maximal performance. */
149 bool
150 maybe_hot_bb_p (struct function *fun, const_basic_block bb)
152 gcc_checking_assert (fun);
153 if (profile_status_for_function (fun) == PROFILE_READ)
154 return maybe_hot_count_p (fun, bb->count);
155 return maybe_hot_frequency_p (fun, bb->frequency);
158 /* Return true if the call can be hot. */
160 bool
161 cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
163 if (profile_info && flag_branch_probabilities
164 && (edge->count
165 <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
166 return false;
167 if (edge->caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
168 || (edge->callee
169 && edge->callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
170 return false;
171 if (edge->caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
172 && (edge->callee
173 && edge->callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
174 return false;
175 if (optimize_size)
176 return false;
177 if (edge->caller->frequency == NODE_FREQUENCY_HOT)
178 return true;
179 if (edge->caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
180 && edge->frequency < CGRAPH_FREQ_BASE * 3 / 2)
181 return false;
182 if (flag_guess_branch_prob
183 && edge->frequency <= (CGRAPH_FREQ_BASE
184 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
185 return false;
186 return true;
 189 /* Return true in case edge E can be CPU intensive and should be optimized
 190 for maximal performance. */
192 bool
193 maybe_hot_edge_p (edge e)
195 if (profile_status == PROFILE_READ)
196 return maybe_hot_count_p (cfun, e->count);
197 return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
201 /* Return true in case BB is probably never executed. */
203 bool
204 probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
206 gcc_checking_assert (fun);
207 if (profile_info && flag_branch_probabilities)
208 return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
209 if ((!profile_info || !flag_branch_probabilities)
210 && (cgraph_get_node (fun->decl)->frequency
211 == NODE_FREQUENCY_UNLIKELY_EXECUTED))
212 return true;
213 return false;
216 /* Return true if NODE should be optimized for size. */
218 bool
219 cgraph_optimize_for_size_p (struct cgraph_node *node)
221 if (optimize_size)
222 return true;
223 if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
224 return true;
225 else
226 return false;
 229 /* Return true when function FUN should always be optimized for size. */
231 bool
232 optimize_function_for_size_p (struct function *fun)
234 if (optimize_size)
235 return true;
236 if (!fun || !fun->decl)
237 return false;
238 return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
 241 /* Return true when function FUN should always be optimized for speed. */
243 bool
244 optimize_function_for_speed_p (struct function *fun)
246 return !optimize_function_for_size_p (fun);
249 /* Return TRUE when BB should be optimized for size. */
251 bool
252 optimize_bb_for_size_p (const_basic_block bb)
254 return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (cfun, bb);
257 /* Return TRUE when BB should be optimized for speed. */
259 bool
260 optimize_bb_for_speed_p (const_basic_block bb)
262 return !optimize_bb_for_size_p (bb);
 265 /* Return TRUE when edge E should be optimized for size. */
267 bool
268 optimize_edge_for_size_p (edge e)
270 return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
 273 /* Return TRUE when edge E should be optimized for speed. */
275 bool
276 optimize_edge_for_speed_p (edge e)
278 return !optimize_edge_for_size_p (e);
 281 /* Return TRUE when the current instruction should be optimized for size. */
283 bool
284 optimize_insn_for_size_p (void)
286 return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
 289 /* Return TRUE when the current instruction should be optimized for speed. */
291 bool
292 optimize_insn_for_speed_p (void)
294 return !optimize_insn_for_size_p ();
297 /* Return TRUE when LOOP should be optimized for size. */
299 bool
300 optimize_loop_for_size_p (struct loop *loop)
302 return optimize_bb_for_size_p (loop->header);
305 /* Return TRUE when LOOP should be optimized for speed. */
307 bool
308 optimize_loop_for_speed_p (struct loop *loop)
310 return optimize_bb_for_speed_p (loop->header);
313 /* Return TRUE when LOOP nest should be optimized for speed. */
315 bool
316 optimize_loop_nest_for_speed_p (struct loop *loop)
318 struct loop *l = loop;
319 if (optimize_loop_for_speed_p (loop))
320 return true;
321 l = loop->inner;
322 while (l && l != loop)
324 if (optimize_loop_for_speed_p (l))
325 return true;
326 if (l->inner)
327 l = l->inner;
328 else if (l->next)
329 l = l->next;
330 else
332 while (l != loop && !l->next)
333 l = loop_outer (l);
334 if (l != loop)
335 l = l->next;
338 return false;
341 /* Return TRUE when LOOP nest should be optimized for size. */
343 bool
344 optimize_loop_nest_for_size_p (struct loop *loop)
346 return !optimize_loop_nest_for_speed_p (loop);
349 /* Return true when edge E is likely to be well predictable by branch
350 predictor. */
352 bool
353 predictable_edge_p (edge e)
355 if (profile_status == PROFILE_ABSENT)
356 return false;
357 if ((e->probability
358 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
359 || (REG_BR_PROB_BASE - e->probability
360 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
361 return true;
362 return false;
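/* A sketch of the threshold above, assuming REG_BR_PROB_BASE of 10000 and a
   PARAM_PREDICTABLE_BRANCH_OUTCOME value of 2: an edge counts as predictable
   when its probability is at most 200 or at least 9800, i.e. within 2% of
   either extreme.  */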
366 /* Set RTL expansion for BB profile. */
368 void
369 rtl_profile_for_bb (basic_block bb)
371 crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
374 /* Set RTL expansion for edge profile. */
376 void
377 rtl_profile_for_edge (edge e)
379 crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
382 /* Set RTL expansion to default mode (i.e. when profile info is not known). */
383 void
384 default_rtl_profile (void)
386 crtl->maybe_hot_insn_p = true;
 389 /* Return true if one of the outgoing edges is already predicted by
 390 PREDICTOR. */
392 bool
393 rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
395 rtx note;
396 if (!INSN_P (BB_END (bb)))
397 return false;
398 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
399 if (REG_NOTE_KIND (note) == REG_BR_PRED
400 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
401 return true;
402 return false;
405 /* This map contains for a basic block the list of predictions for the
406 outgoing edges. */
408 static struct pointer_map_t *bb_predictions;
 410 /* Structure representing predictions on the tree level. */
412 struct edge_prediction {
413 struct edge_prediction *ep_next;
414 edge ep_edge;
415 enum br_predictor ep_predictor;
416 int ep_probability;
 419 /* Return true if one of the outgoing edges is already predicted by
 420 PREDICTOR. */
422 bool
423 gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
425 struct edge_prediction *i;
426 void **preds = pointer_map_contains (bb_predictions, bb);
428 if (!preds)
429 return false;
431 for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
432 if (i->ep_predictor == predictor)
433 return true;
434 return false;
 437 /* Return true when the probability of edge is reliable.
 439 The profile guessing code is good at predicting the branch outcome (i.e.
 440 taken/not taken); it gets that right slightly over 75% of the time.
 441 It is however notoriously poor at predicting the probability itself.
 442 In general the guessed profile appears a lot flatter (with probabilities
 443 closer to 50%) than reality, so it is a bad idea to use it to drive
 444 optimizations such as those disabling dynamic branch prediction for well
 445 predictable branches.
 447 There are two exceptions - edges leading to noreturn calls and edges
 448 predicted by the number-of-iterations heuristics are predicted well. This
 449 predicate should be able to distinguish those, but at the moment it simply
 450 checks for the noreturn heuristic, which is the only one giving a
 451 probability over 99% or below 1%. In the future we might want to propagate
 452 reliability information across the CFG if we find it useful in more places. */
453 static bool
454 probability_reliable_p (int prob)
456 return (profile_status == PROFILE_READ
457 || (profile_status == PROFILE_GUESSED
458 && (prob <= HITRATE (1) || prob >= HITRATE (99))));
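/* In other words (with REG_BR_PROB_BASE of 10000), a guessed probability is
   treated as reliable only when it is at most HITRATE (1) == 100 or at least
   HITRATE (99) == 9900; read profile counts are always trusted.  */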
461 /* Same predicate as above, working on edges. */
462 bool
463 edge_probability_reliable_p (const_edge e)
465 return probability_reliable_p (e->probability);
468 /* Same predicate as edge_probability_reliable_p, working on notes. */
469 bool
470 br_prob_note_reliable_p (const_rtx note)
472 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
473 return probability_reliable_p (INTVAL (XEXP (note, 0)));
476 static void
477 predict_insn (rtx insn, enum br_predictor predictor, int probability)
479 gcc_assert (any_condjump_p (insn));
480 if (!flag_guess_branch_prob)
481 return;
483 add_reg_note (insn, REG_BR_PRED,
484 gen_rtx_CONCAT (VOIDmode,
485 GEN_INT ((int) predictor),
486 GEN_INT ((int) probability)));
489 /* Predict insn by given predictor. */
491 void
492 predict_insn_def (rtx insn, enum br_predictor predictor,
493 enum prediction taken)
495 int probability = predictor_info[(int) predictor].hitrate;
497 if (taken != TAKEN)
498 probability = REG_BR_PROB_BASE - probability;
500 predict_insn (insn, predictor, probability);
503 /* Predict edge E with given probability if possible. */
505 void
506 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
508 rtx last_insn;
509 last_insn = BB_END (e->src);
511 /* We can store the branch prediction information only about
512 conditional jumps. */
513 if (!any_condjump_p (last_insn))
514 return;
516 /* We always store probability of branching. */
517 if (e->flags & EDGE_FALLTHRU)
518 probability = REG_BR_PROB_BASE - probability;
520 predict_insn (last_insn, predictor, probability);
523 /* Predict edge E with the given PROBABILITY. */
524 void
525 gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
527 gcc_assert (profile_status != PROFILE_GUESSED);
528 if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
529 && flag_guess_branch_prob && optimize)
531 struct edge_prediction *i = XNEW (struct edge_prediction);
532 void **preds = pointer_map_insert (bb_predictions, e->src);
534 i->ep_next = (struct edge_prediction *) *preds;
535 *preds = i;
536 i->ep_probability = probability;
537 i->ep_predictor = predictor;
538 i->ep_edge = e;
542 /* Remove all predictions on given basic block that are attached
543 to edge E. */
544 void
545 remove_predictions_associated_with_edge (edge e)
547 void **preds;
549 if (!bb_predictions)
550 return;
552 preds = pointer_map_contains (bb_predictions, e->src);
554 if (preds)
556 struct edge_prediction **prediction = (struct edge_prediction **) preds;
557 struct edge_prediction *next;
559 while (*prediction)
561 if ((*prediction)->ep_edge == e)
563 next = (*prediction)->ep_next;
564 free (*prediction);
565 *prediction = next;
567 else
568 prediction = &((*prediction)->ep_next);
573 /* Clears the list of predictions stored for BB. */
575 static void
576 clear_bb_predictions (basic_block bb)
578 void **preds = pointer_map_contains (bb_predictions, bb);
579 struct edge_prediction *pred, *next;
581 if (!preds)
582 return;
584 for (pred = (struct edge_prediction *) *preds; pred; pred = next)
586 next = pred->ep_next;
587 free (pred);
589 *preds = NULL;
592 /* Return true when we can store prediction on insn INSN.
593 At the moment we represent predictions only on conditional
 594 jumps, not on computed jumps or other complicated cases. */
595 static bool
596 can_predict_insn_p (const_rtx insn)
598 return (JUMP_P (insn)
599 && any_condjump_p (insn)
600 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
603 /* Predict edge E by given predictor if possible. */
605 void
606 predict_edge_def (edge e, enum br_predictor predictor,
607 enum prediction taken)
609 int probability = predictor_info[(int) predictor].hitrate;
611 if (taken != TAKEN)
612 probability = REG_BR_PROB_BASE - probability;
614 predict_edge (e, predictor, probability);
617 /* Invert all branch predictions or probability notes in the INSN. This needs
618 to be done each time we invert the condition used by the jump. */
620 void
621 invert_br_probabilities (rtx insn)
623 rtx note;
625 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
626 if (REG_NOTE_KIND (note) == REG_BR_PROB)
627 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
628 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
629 XEXP (XEXP (note, 0), 1)
630 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
633 /* Dump information about the branch prediction to the output file. */
635 static void
636 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
637 basic_block bb, int used)
639 edge e;
640 edge_iterator ei;
642 if (!file)
643 return;
645 FOR_EACH_EDGE (e, ei, bb->succs)
646 if (! (e->flags & EDGE_FALLTHRU))
647 break;
649 fprintf (file, " %s heuristics%s: %.1f%%",
650 predictor_info[predictor].name,
651 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
653 if (bb->count)
655 fprintf (file, " exec ");
656 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
657 if (e)
659 fprintf (file, " hit ");
660 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
661 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
665 fprintf (file, "\n");
 668 /* We cannot predict the probabilities of the outgoing edges of BB. Set them
 669 evenly and hope for the best. */
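/* For example, with REG_BR_PROB_BASE of 10000 and three non-EH, non-fake
   successors, each of them would receive (10000 + 1) / 3 == 3333.  */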
670 static void
671 set_even_probabilities (basic_block bb)
673 int nedges = 0;
674 edge e;
675 edge_iterator ei;
677 FOR_EACH_EDGE (e, ei, bb->succs)
678 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
679 nedges ++;
680 FOR_EACH_EDGE (e, ei, bb->succs)
681 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
682 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
683 else
684 e->probability = 0;
687 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
688 note if not already present. Remove now useless REG_BR_PRED notes. */
690 static void
691 combine_predictions_for_insn (rtx insn, basic_block bb)
693 rtx prob_note;
694 rtx *pnote;
695 rtx note;
696 int best_probability = PROB_EVEN;
697 enum br_predictor best_predictor = END_PREDICTORS;
698 int combined_probability = REG_BR_PROB_BASE / 2;
699 int d;
700 bool first_match = false;
701 bool found = false;
703 if (!can_predict_insn_p (insn))
705 set_even_probabilities (bb);
706 return;
709 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
710 pnote = &REG_NOTES (insn);
711 if (dump_file)
712 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
713 bb->index);
 715 /* We implement the "first match" heuristic and use the probability guessed
 716 by the predictor with the smallest index. */
717 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
718 if (REG_NOTE_KIND (note) == REG_BR_PRED)
720 enum br_predictor predictor = ((enum br_predictor)
721 INTVAL (XEXP (XEXP (note, 0), 0)));
722 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
724 found = true;
725 if (best_predictor > predictor)
726 best_probability = probability, best_predictor = predictor;
728 d = (combined_probability * probability
729 + (REG_BR_PROB_BASE - combined_probability)
730 * (REG_BR_PROB_BASE - probability));
732 /* Use FP math to avoid overflows of 32bit integers. */
733 if (d == 0)
734 /* If one probability is 0% and one 100%, avoid division by zero. */
735 combined_probability = REG_BR_PROB_BASE / 2;
736 else
737 combined_probability = (((double) combined_probability) * probability
738 * REG_BR_PROB_BASE / d + 0.5);
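/* A rough worked example of the combination above, assuming REG_BR_PROB_BASE
   of 10000: starting from the even prior of 5000, a first note of 9000 leaves
   the combined value at 9000, and a second, agreeing note of 8000 then raises
   it to about 9730, reflecting the increased confidence when independent
   predictors agree.  */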
 741 /* Decide which heuristic to use. If we didn't match anything,
 742 use the no_prediction heuristic; if we did match, use either
 743 first match or Dempster-Shafer theory depending on the flags. */
745 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
746 first_match = true;
748 if (!found)
749 dump_prediction (dump_file, PRED_NO_PREDICTION,
750 combined_probability, bb, true);
751 else
753 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
754 bb, !first_match);
755 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
756 bb, first_match);
759 if (first_match)
760 combined_probability = best_probability;
761 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
763 while (*pnote)
765 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
767 enum br_predictor predictor = ((enum br_predictor)
768 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
769 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
771 dump_prediction (dump_file, predictor, probability, bb,
772 !first_match || best_predictor == predictor);
773 *pnote = XEXP (*pnote, 1);
775 else
776 pnote = &XEXP (*pnote, 1);
779 if (!prob_note)
781 add_reg_note (insn, REG_BR_PROB, GEN_INT (combined_probability));
 783 /* Save the prediction into the CFG in case we are looking at a non-degenerate
 784 conditional jump. */
785 if (!single_succ_p (bb))
787 BRANCH_EDGE (bb)->probability = combined_probability;
788 FALLTHRU_EDGE (bb)->probability
789 = REG_BR_PROB_BASE - combined_probability;
792 else if (!single_succ_p (bb))
794 int prob = INTVAL (XEXP (prob_note, 0));
796 BRANCH_EDGE (bb)->probability = prob;
797 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
799 else
800 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
803 /* Combine predictions into single probability and store them into CFG.
804 Remove now useless prediction entries. */
806 static void
807 combine_predictions_for_bb (basic_block bb)
809 int best_probability = PROB_EVEN;
810 enum br_predictor best_predictor = END_PREDICTORS;
811 int combined_probability = REG_BR_PROB_BASE / 2;
812 int d;
813 bool first_match = false;
814 bool found = false;
815 struct edge_prediction *pred;
816 int nedges = 0;
817 edge e, first = NULL, second = NULL;
818 edge_iterator ei;
819 void **preds;
821 FOR_EACH_EDGE (e, ei, bb->succs)
822 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
824 nedges ++;
825 if (first && !second)
826 second = e;
827 if (!first)
828 first = e;
831 /* When there is no successor or only one choice, prediction is easy.
833 We are lazy for now and predict only basic blocks with two outgoing
 834 edges. It is possible to predict the generic case too, but we have to
 835 ignore the first-match heuristic and do more involved combining. Implement
836 this later. */
837 if (nedges != 2)
839 if (!bb->count)
840 set_even_probabilities (bb);
841 clear_bb_predictions (bb);
842 if (dump_file)
843 fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
844 nedges, bb->index);
845 return;
848 if (dump_file)
849 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
851 preds = pointer_map_contains (bb_predictions, bb);
852 if (preds)
 854 /* We implement the "first match" heuristic and use the probability guessed
 855 by the predictor with the smallest index. */
856 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
858 enum br_predictor predictor = pred->ep_predictor;
859 int probability = pred->ep_probability;
861 if (pred->ep_edge != first)
862 probability = REG_BR_PROB_BASE - probability;
864 found = true;
 865 /* The first-match heuristic would be wildly confused if we predicted
 866 both directions. */
867 if (best_predictor > predictor)
869 struct edge_prediction *pred2;
870 int prob = probability;
872 for (pred2 = (struct edge_prediction *) *preds; pred2; pred2 = pred2->ep_next)
873 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
 875 int probability2 = pred2->ep_probability;
877 if (pred2->ep_edge != first)
878 probability2 = REG_BR_PROB_BASE - probability2;
880 if ((probability < REG_BR_PROB_BASE / 2) !=
881 (probability2 < REG_BR_PROB_BASE / 2))
882 break;
 884 /* If the same predictor later gave a better result, go for it! */
885 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
886 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
887 prob = probability2;
889 if (!pred2)
890 best_probability = prob, best_predictor = predictor;
893 d = (combined_probability * probability
894 + (REG_BR_PROB_BASE - combined_probability)
895 * (REG_BR_PROB_BASE - probability));
897 /* Use FP math to avoid overflows of 32bit integers. */
898 if (d == 0)
899 /* If one probability is 0% and one 100%, avoid division by zero. */
900 combined_probability = REG_BR_PROB_BASE / 2;
901 else
902 combined_probability = (((double) combined_probability)
903 * probability
904 * REG_BR_PROB_BASE / d + 0.5);
908 /* Decide which heuristic to use. In case we didn't match anything,
909 use no_prediction heuristic, in case we did match, use either
910 first match or Dempster-Shaffer theory depending on the flags. */
912 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
913 first_match = true;
915 if (!found)
916 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
917 else
919 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
920 !first_match);
921 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
922 first_match);
925 if (first_match)
926 combined_probability = best_probability;
927 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
929 if (preds)
931 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
933 enum br_predictor predictor = pred->ep_predictor;
934 int probability = pred->ep_probability;
936 if (pred->ep_edge != EDGE_SUCC (bb, 0))
937 probability = REG_BR_PROB_BASE - probability;
938 dump_prediction (dump_file, predictor, probability, bb,
939 !first_match || best_predictor == predictor);
942 clear_bb_predictions (bb);
944 if (!bb->count)
946 first->probability = combined_probability;
947 second->probability = REG_BR_PROB_BASE - combined_probability;
951 /* Check if T1 and T2 satisfy the IV_COMPARE condition.
 952 Return the SSA_NAME if the condition is satisfied, NULL otherwise.
954 T1 and T2 should be one of the following cases:
955 1. T1 is SSA_NAME, T2 is NULL
956 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
957 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
959 static tree
960 strips_small_constant (tree t1, tree t2)
962 tree ret = NULL;
963 int value = 0;
965 if (!t1)
966 return NULL;
967 else if (TREE_CODE (t1) == SSA_NAME)
968 ret = t1;
969 else if (host_integerp (t1, 0))
970 value = tree_low_cst (t1, 0);
971 else
972 return NULL;
974 if (!t2)
975 return ret;
976 else if (host_integerp (t2, 0))
977 value = tree_low_cst (t2, 0);
978 else if (TREE_CODE (t2) == SSA_NAME)
980 if (ret)
981 return NULL;
982 else
983 ret = t2;
986 if (value <= 4 && value >= -4)
987 return ret;
988 else
989 return NULL;
992 /* Return the SSA_NAME in T or T's operands.
993 Return NULL if SSA_NAME cannot be found. */
995 static tree
996 get_base_value (tree t)
998 if (TREE_CODE (t) == SSA_NAME)
999 return t;
1001 if (!BINARY_CLASS_P (t))
1002 return NULL;
1004 switch (TREE_OPERAND_LENGTH (t))
1006 case 1:
1007 return strips_small_constant (TREE_OPERAND (t, 0), NULL);
1008 case 2:
1009 return strips_small_constant (TREE_OPERAND (t, 0),
1010 TREE_OPERAND (t, 1));
1011 default:
1012 return NULL;
1016 /* Check the compare STMT in LOOP. If it compares an induction
1017 variable to a loop invariant, return true, and save
1018 LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
 1019 Otherwise return false and set LOOP_INVARIANT to NULL. */
1021 static bool
1022 is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
1023 tree *loop_invariant,
1024 enum tree_code *compare_code,
1025 int *loop_step,
1026 tree *loop_iv_base)
1028 tree op0, op1, bound, base;
1029 affine_iv iv0, iv1;
1030 enum tree_code code;
1031 int step;
1033 code = gimple_cond_code (stmt);
1034 *loop_invariant = NULL;
1036 switch (code)
1038 case GT_EXPR:
1039 case GE_EXPR:
1040 case NE_EXPR:
1041 case LT_EXPR:
1042 case LE_EXPR:
1043 case EQ_EXPR:
1044 break;
1046 default:
1047 return false;
1050 op0 = gimple_cond_lhs (stmt);
1051 op1 = gimple_cond_rhs (stmt);
1053 if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
1054 || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
1055 return false;
1056 if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
1057 return false;
1058 if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
1059 return false;
1060 if (TREE_CODE (iv0.step) != INTEGER_CST
1061 || TREE_CODE (iv1.step) != INTEGER_CST)
1062 return false;
1063 if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
1064 || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
1065 return false;
1067 if (integer_zerop (iv0.step))
1069 if (code != NE_EXPR && code != EQ_EXPR)
1070 code = invert_tree_comparison (code, false);
1071 bound = iv0.base;
1072 base = iv1.base;
1073 if (host_integerp (iv1.step, 0))
1074 step = tree_low_cst (iv1.step, 0);
1075 else
1076 return false;
1078 else
1080 bound = iv1.base;
1081 base = iv0.base;
1082 if (host_integerp (iv0.step, 0))
1083 step = tree_low_cst (iv0.step, 0);
1084 else
1085 return false;
1088 if (TREE_CODE (bound) != INTEGER_CST)
1089 bound = get_base_value (bound);
1090 if (!bound)
1091 return false;
1092 if (TREE_CODE (base) != INTEGER_CST)
1093 base = get_base_value (base);
1094 if (!base)
1095 return false;
1097 *loop_invariant = bound;
1098 *compare_code = code;
1099 *loop_step = step;
1100 *loop_iv_base = base;
1101 return true;
1104 /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
1106 static bool
1107 expr_coherent_p (tree t1, tree t2)
1109 gimple stmt;
1110 tree ssa_name_1 = NULL;
1111 tree ssa_name_2 = NULL;
1113 gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
1114 gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);
1116 if (t1 == t2)
1117 return true;
1119 if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
1120 return true;
1121 if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
1122 return false;
1124 /* Check to see if t1 is expressed/defined with t2. */
1125 stmt = SSA_NAME_DEF_STMT (t1);
1126 gcc_assert (stmt != NULL);
1127 if (is_gimple_assign (stmt))
1129 ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1130 if (ssa_name_1 && ssa_name_1 == t2)
1131 return true;
1134 /* Check to see if t2 is expressed/defined with t1. */
1135 stmt = SSA_NAME_DEF_STMT (t2);
1136 gcc_assert (stmt != NULL);
1137 if (is_gimple_assign (stmt))
1139 ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1140 if (ssa_name_2 && ssa_name_2 == t1)
1141 return true;
1144 /* Compare if t1 and t2's def_stmts are identical. */
1145 if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
1146 return true;
1147 else
1148 return false;
1151 /* Predict branch probability of BB when BB contains a branch that compares
1152 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1153 loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.
1155 E.g.
1156 for (int i = 0; i < bound; i++) {
1157 if (i < bound - 2)
1158 computation_1();
1159 else
1160 computation_2();
1163 In this loop, we will predict the branch inside the loop to be taken. */
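/* As a sketch of the constant case handled below: if the loop bound, the IV
   base and the compared bound were the known constants 100, 0 and 98 (with
   step 1), the then-branch would be given roughly a 98/100 probability,
   i.e. 98 * REG_BR_PROB_BASE / 100.  */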
1165 static void
1166 predict_iv_comparison (struct loop *loop, basic_block bb,
1167 tree loop_bound_var,
1168 tree loop_iv_base_var,
1169 enum tree_code loop_bound_code,
1170 int loop_bound_step)
1172 gimple stmt;
1173 tree compare_var, compare_base;
1174 enum tree_code compare_code;
1175 int compare_step;
1176 edge then_edge;
1177 edge_iterator ei;
1179 if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1180 || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
1181 || predicted_by_p (bb, PRED_LOOP_EXIT))
1182 return;
1184 stmt = last_stmt (bb);
1185 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1186 return;
1187 if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
1188 &compare_code,
1189 &compare_step,
1190 &compare_base))
1191 return;
1193 /* Find the taken edge. */
1194 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1195 if (then_edge->flags & EDGE_TRUE_VALUE)
1196 break;
1198 /* When comparing an IV to a loop invariant, NE is more likely to be
1199 taken while EQ is more likely to be not-taken. */
1200 if (compare_code == NE_EXPR)
1202 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1203 return;
1205 else if (compare_code == EQ_EXPR)
1207 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1208 return;
1211 if (!expr_coherent_p (loop_iv_base_var, compare_base))
1212 return;
1214 /* If loop bound, base and compare bound are all constants, we can
1215 calculate the probability directly. */
1216 if (host_integerp (loop_bound_var, 0)
1217 && host_integerp (compare_var, 0)
1218 && host_integerp (compare_base, 0))
1220 int probability;
1221 HOST_WIDE_INT compare_count;
1222 HOST_WIDE_INT loop_bound = tree_low_cst (loop_bound_var, 0);
1223 HOST_WIDE_INT compare_bound = tree_low_cst (compare_var, 0);
1224 HOST_WIDE_INT base = tree_low_cst (compare_base, 0);
1225 HOST_WIDE_INT loop_count = (loop_bound - base) / compare_step;
1227 if ((compare_step > 0)
1228 ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
1229 compare_count = (loop_bound - compare_bound) / compare_step;
1230 else
1231 compare_count = (compare_bound - base) / compare_step;
1233 if (compare_code == LE_EXPR || compare_code == GE_EXPR)
1234 compare_count ++;
1235 if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
1236 loop_count ++;
1237 if (compare_count < 0)
1238 compare_count = 0;
1239 if (loop_count < 0)
1240 loop_count = 0;
1242 if (loop_count == 0)
1243 probability = 0;
1244 else if (compare_count > loop_count)
1245 probability = REG_BR_PROB_BASE;
1246 else
1247 probability = (double) REG_BR_PROB_BASE * compare_count / loop_count;
1248 predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
1249 return;
1252 if (expr_coherent_p (loop_bound_var, compare_var))
1254 if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
1255 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1256 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1257 else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
1258 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1259 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1260 else if (loop_bound_code == NE_EXPR)
1262 /* If the loop backedge condition is "(i != bound)", we do
1263 the comparison based on the step of IV:
1264 * step < 0 : backedge condition is like (i > bound)
1265 * step > 0 : backedge condition is like (i < bound) */
1266 gcc_assert (loop_bound_step != 0);
1267 if (loop_bound_step > 0
1268 && (compare_code == LT_EXPR
1269 || compare_code == LE_EXPR))
1270 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1271 else if (loop_bound_step < 0
1272 && (compare_code == GT_EXPR
1273 || compare_code == GE_EXPR))
1274 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1275 else
1276 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1278 else
1279 /* The branch is predicted not-taken if loop_bound_code is
 1280 the opposite of compare_code. */
1281 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1283 else if (expr_coherent_p (loop_iv_base_var, compare_var))
1285 /* For cases like:
1286 for (i = s; i < h; i++)
1287 if (i > s + 2) ....
1288 The branch should be predicted taken. */
1289 if (loop_bound_step > 0
1290 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1291 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1292 else if (loop_bound_step < 0
1293 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1294 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1295 else
1296 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1300 /* Predict edge probabilities by exploiting loop structure. */
1302 static void
1303 predict_loops (void)
1305 loop_iterator li;
1306 struct loop *loop;
1308 /* Try to predict out blocks in a loop that are not part of a
1309 natural loop. */
1310 FOR_EACH_LOOP (li, loop, 0)
1312 basic_block bb, *bbs;
1313 unsigned j, n_exits;
1314 VEC (edge, heap) *exits;
1315 struct tree_niter_desc niter_desc;
1316 edge ex;
1317 struct nb_iter_bound *nb_iter;
1318 enum tree_code loop_bound_code = ERROR_MARK;
1319 int loop_bound_step = 0;
1320 tree loop_bound_var = NULL;
1321 tree loop_iv_base = NULL;
1322 gimple stmt = NULL;
1324 exits = get_loop_exit_edges (loop);
1325 n_exits = VEC_length (edge, exits);
1327 FOR_EACH_VEC_ELT (edge, exits, j, ex)
1329 tree niter = NULL;
1330 HOST_WIDE_INT nitercst;
1331 int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
1332 int probability;
1333 enum br_predictor predictor;
1335 if (number_of_iterations_exit (loop, ex, &niter_desc, false))
1336 niter = niter_desc.niter;
1337 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
1338 niter = loop_niter_by_eval (loop, ex);
1340 if (TREE_CODE (niter) == INTEGER_CST)
1342 if (host_integerp (niter, 1)
1343 && compare_tree_int (niter, max-1) == -1)
1344 nitercst = tree_low_cst (niter, 1) + 1;
1345 else
1346 nitercst = max;
1347 predictor = PRED_LOOP_ITERATIONS;
1349 /* If we have just one exit and we can derive some information about
1350 the number of iterations of the loop from the statements inside
1351 the loop, use it to predict this exit. */
1352 else if (n_exits == 1)
1354 nitercst = estimated_stmt_executions_int (loop);
1355 if (nitercst < 0)
1356 continue;
1357 if (nitercst > max)
1358 nitercst = max;
1360 predictor = PRED_LOOP_ITERATIONS_GUESSED;
1362 else
1363 continue;
1365 probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
1366 predict_edge (ex, predictor, probability);
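/* E.g. with nitercst of 10 and REG_BR_PROB_BASE of 10000 the exit edge above
   is predicted with probability (10000 + 5) / 10 == 1000, i.e. the exit is
   expected to be taken on roughly one in ten executions.  */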
1368 VEC_free (edge, heap, exits);
1370 /* Find information about loop bound variables. */
1371 for (nb_iter = loop->bounds; nb_iter;
1372 nb_iter = nb_iter->next)
1373 if (nb_iter->stmt
1374 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
1376 stmt = nb_iter->stmt;
1377 break;
1379 if (!stmt && last_stmt (loop->header)
1380 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
1381 stmt = last_stmt (loop->header);
1382 if (stmt)
1383 is_comparison_with_loop_invariant_p (stmt, loop,
1384 &loop_bound_var,
1385 &loop_bound_code,
1386 &loop_bound_step,
1387 &loop_iv_base);
1389 bbs = get_loop_body (loop);
1391 for (j = 0; j < loop->num_nodes; j++)
1393 int header_found = 0;
1394 edge e;
1395 edge_iterator ei;
1397 bb = bbs[j];
 1399 /* Bypass loop heuristics on continue statements. These
 1400 statements construct loops via "non-loop" constructs
 1401 in the source language and are better handled
 1402 separately. */
1403 if (predicted_by_p (bb, PRED_CONTINUE))
1404 continue;
1406 /* Loop branch heuristics - predict an edge back to a
1407 loop's head as taken. */
1408 if (bb == loop->latch)
1410 e = find_edge (loop->latch, loop->header);
1411 if (e)
1413 header_found = 1;
1414 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
 1418 /* Loop exit heuristics - predict as not taken an edge exiting the loop
 1419 if the conditional has no successor that is the loop header. */
1420 if (!header_found
1421 /* If we already used more reliable loop exit predictors, do not
1422 bother with PRED_LOOP_EXIT. */
1423 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1424 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
 1426 /* For a loop with many exits we don't want to predict all exits
 1427 with a fairly large probability, because if all exits are
 1428 considered in a row, the loop would be predicted to iterate
 1429 almost never. The code dividing the probability by the number of
 1430 exits is very rough. It should compute the number of exits
 1431 taken on each path through the function (not the overall number
 1432 of exits, which might be a lot higher for loops with wide switch
 1433 statements in them) and take the n-th root.
 1435 We limit the minimal probability to 2% to keep
 1436 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
 1437 as this was causing a regression in the perl benchmark containing such
 1438 a wide loop. */
1440 int probability = ((REG_BR_PROB_BASE
1441 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
1442 / n_exits);
1443 if (probability < HITRATE (2))
1444 probability = HITRATE (2);
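/* A sketch of the division above, assuming a PRED_LOOP_EXIT hitrate of about
   90% and a loop with eight exits: each exit edge would get
   (10000 - 9000) / 8 == 125, which the 2% floor then raises to
   HITRATE (2) == 200.  */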
1445 FOR_EACH_EDGE (e, ei, bb->succs)
1446 if (e->dest->index < NUM_FIXED_BLOCKS
1447 || !flow_bb_inside_loop_p (loop, e->dest))
1448 predict_edge (e, PRED_LOOP_EXIT, probability);
1450 if (loop_bound_var)
1451 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
1452 loop_bound_code,
1453 loop_bound_step);
1456 /* Free basic blocks from get_loop_body. */
1457 free (bbs);
1461 /* Attempt to predict probabilities of BB outgoing edges using local
1462 properties. */
1463 static void
1464 bb_estimate_probability_locally (basic_block bb)
1466 rtx last_insn = BB_END (bb);
1467 rtx cond;
1469 if (! can_predict_insn_p (last_insn))
1470 return;
1471 cond = get_condition (last_insn, NULL, false, false);
1472 if (! cond)
1473 return;
1475 /* Try "pointer heuristic."
1476 A comparison ptr == 0 is predicted as false.
1477 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1478 if (COMPARISON_P (cond)
1479 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
1480 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
1482 if (GET_CODE (cond) == EQ)
1483 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
1484 else if (GET_CODE (cond) == NE)
1485 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
1487 else
1489 /* Try "opcode heuristic."
1490 EQ tests are usually false and NE tests are usually true. Also,
1491 most quantities are positive, so we can make the appropriate guesses
1492 about signed comparisons against zero. */
1493 switch (GET_CODE (cond))
1495 case CONST_INT:
1496 /* Unconditional branch. */
1497 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
1498 cond == const0_rtx ? NOT_TAKEN : TAKEN);
1499 break;
1501 case EQ:
1502 case UNEQ:
 1503 /* Floating point comparisons appear to behave in a very
 1504 unpredictable way because of the special role of equality tests in
1505 FP code. */
1506 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1508 /* Comparisons with 0 are often used for booleans and there is
1509 nothing useful to predict about them. */
1510 else if (XEXP (cond, 1) == const0_rtx
1511 || XEXP (cond, 0) == const0_rtx)
1513 else
1514 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
1515 break;
1517 case NE:
1518 case LTGT:
 1519 /* Floating point comparisons appear to behave in a very
 1520 unpredictable way because of the special role of equality tests in
1521 FP code. */
1522 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1524 /* Comparisons with 0 are often used for booleans and there is
1525 nothing useful to predict about them. */
1526 else if (XEXP (cond, 1) == const0_rtx
1527 || XEXP (cond, 0) == const0_rtx)
1529 else
1530 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
1531 break;
1533 case ORDERED:
1534 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
1535 break;
1537 case UNORDERED:
1538 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
1539 break;
1541 case LE:
1542 case LT:
1543 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1544 || XEXP (cond, 1) == constm1_rtx)
1545 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
1546 break;
1548 case GE:
1549 case GT:
1550 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1551 || XEXP (cond, 1) == constm1_rtx)
1552 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
1553 break;
1555 default:
1556 break;
1560 /* Set edge->probability for each successor edge of BB. */
1561 void
1562 guess_outgoing_edge_probabilities (basic_block bb)
1564 bb_estimate_probability_locally (bb);
1565 combine_predictions_for_insn (BB_END (bb), bb);
1568 static tree expr_expected_value (tree, bitmap);
1570 /* Helper function for expr_expected_value. */
1572 static tree
1573 expr_expected_value_1 (tree type, tree op0, enum tree_code code,
1574 tree op1, bitmap visited)
1576 gimple def;
1578 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
1580 if (TREE_CONSTANT (op0))
1581 return op0;
1583 if (code != SSA_NAME)
1584 return NULL_TREE;
1586 def = SSA_NAME_DEF_STMT (op0);
1588 /* If we were already here, break the infinite cycle. */
1589 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
1590 return NULL;
1592 if (gimple_code (def) == GIMPLE_PHI)
 1594 /* All the arguments of the PHI node must have the same expected
 1595 constant value. */
1596 int i, n = gimple_phi_num_args (def);
1597 tree val = NULL, new_val;
1599 for (i = 0; i < n; i++)
1601 tree arg = PHI_ARG_DEF (def, i);
1603 /* If this PHI has itself as an argument, we cannot
 1604 determine the expected value of this argument. However,
1605 if we can find an expected constant value for the other
1606 PHI args then we can still be sure that this is
1607 likely a constant. So be optimistic and just
1608 continue with the next argument. */
1609 if (arg == PHI_RESULT (def))
1610 continue;
1612 new_val = expr_expected_value (arg, visited);
1613 if (!new_val)
1614 return NULL;
1615 if (!val)
1616 val = new_val;
1617 else if (!operand_equal_p (val, new_val, false))
1618 return NULL;
1620 return val;
1622 if (is_gimple_assign (def))
1624 if (gimple_assign_lhs (def) != op0)
1625 return NULL;
1627 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
1628 gimple_assign_rhs1 (def),
1629 gimple_assign_rhs_code (def),
1630 gimple_assign_rhs2 (def),
1631 visited);
1634 if (is_gimple_call (def))
1636 tree decl = gimple_call_fndecl (def);
1637 if (!decl)
1638 return NULL;
1639 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1640 switch (DECL_FUNCTION_CODE (decl))
1642 case BUILT_IN_EXPECT:
1644 tree val;
1645 if (gimple_call_num_args (def) != 2)
1646 return NULL;
1647 val = gimple_call_arg (def, 0);
1648 if (TREE_CONSTANT (val))
1649 return val;
1650 return gimple_call_arg (def, 1);
1653 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
1654 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1655 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1656 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1657 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1658 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1659 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
1660 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
1661 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1662 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1663 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1664 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1665 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1666 /* Assume that any given atomic operation has low contention,
1667 and thus the compare-and-swap operation succeeds. */
1668 return boolean_true_node;
1672 return NULL;
1675 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
1677 tree res;
1678 op0 = expr_expected_value (op0, visited);
1679 if (!op0)
1680 return NULL;
1681 op1 = expr_expected_value (op1, visited);
1682 if (!op1)
1683 return NULL;
1684 res = fold_build2 (code, type, op0, op1);
1685 if (TREE_CONSTANT (res))
1686 return res;
1687 return NULL;
1689 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
1691 tree res;
1692 op0 = expr_expected_value (op0, visited);
1693 if (!op0)
1694 return NULL;
1695 res = fold_build1 (code, type, op0);
1696 if (TREE_CONSTANT (res))
1697 return res;
1698 return NULL;
1700 return NULL;
 1703 /* Return the constant EXPR will likely have at execution time, NULL if unknown.
 1704 The function is used by the builtin_expect branch predictor, so the evidence
 1705 must come from this construct and possibly additional constant folding.
 1707 We may want to implement a more involved value guess (such as value-range
 1708 propagation based prediction), but such tricks should go into a new
 1709 implementation. */
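/* For instance, for source like "if (__builtin_expect (n > 0, 1))" the
   expected value of the guarding condition folds to the constant 1, which
   lets tree_predict_by_opcode predict the then edge as taken with
   PRED_BUILTIN_EXPECT.  */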
1711 static tree
1712 expr_expected_value (tree expr, bitmap visited)
1714 enum tree_code code;
1715 tree op0, op1;
1717 if (TREE_CONSTANT (expr))
1718 return expr;
1720 extract_ops_from_tree (expr, &code, &op0, &op1);
1721 return expr_expected_value_1 (TREE_TYPE (expr),
1722 op0, code, op1, visited);
1726 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
1727 we no longer need. */
1728 static unsigned int
1729 strip_predict_hints (void)
1731 basic_block bb;
1732 gimple ass_stmt;
1733 tree var;
1735 FOR_EACH_BB (bb)
1737 gimple_stmt_iterator bi;
1738 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
1740 gimple stmt = gsi_stmt (bi);
1742 if (gimple_code (stmt) == GIMPLE_PREDICT)
1744 gsi_remove (&bi, true);
1745 continue;
1747 else if (gimple_code (stmt) == GIMPLE_CALL)
1749 tree fndecl = gimple_call_fndecl (stmt);
1751 if (fndecl
1752 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1753 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
1754 && gimple_call_num_args (stmt) == 2)
1756 var = gimple_call_lhs (stmt);
1757 if (var)
1759 ass_stmt
1760 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
1761 gsi_replace (&bi, ass_stmt, true);
1763 else
1765 gsi_remove (&bi, true);
1766 continue;
1770 gsi_next (&bi);
1773 return 0;
1776 /* Predict using opcode of the last statement in basic block. */
1777 static void
1778 tree_predict_by_opcode (basic_block bb)
1780 gimple stmt = last_stmt (bb);
1781 edge then_edge;
1782 tree op0, op1;
1783 tree type;
1784 tree val;
1785 enum tree_code cmp;
1786 bitmap visited;
1787 edge_iterator ei;
1789 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1790 return;
1791 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1792 if (then_edge->flags & EDGE_TRUE_VALUE)
1793 break;
1794 op0 = gimple_cond_lhs (stmt);
1795 op1 = gimple_cond_rhs (stmt);
1796 cmp = gimple_cond_code (stmt);
1797 type = TREE_TYPE (op0);
1798 visited = BITMAP_ALLOC (NULL);
1799 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
1800 BITMAP_FREE (visited);
1801 if (val)
1803 if (integer_zerop (val))
1804 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
1805 else
1806 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
1807 return;
1809 /* Try "pointer heuristic."
1810 A comparison ptr == 0 is predicted as false.
1811 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1812 if (POINTER_TYPE_P (type))
1814 if (cmp == EQ_EXPR)
1815 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
1816 else if (cmp == NE_EXPR)
1817 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1819 else
1821 /* Try "opcode heuristic."
1822 EQ tests are usually false and NE tests are usually true. Also,
1823 most quantities are positive, so we can make the appropriate guesses
1824 about signed comparisons against zero. */
1825 switch (cmp)
1827 case EQ_EXPR:
1828 case UNEQ_EXPR:
 1829 /* Floating point comparisons appear to behave in a very
 1830 unpredictable way because of the special role of equality tests in
1831 FP code. */
1832 if (FLOAT_TYPE_P (type))
1834 /* Comparisons with 0 are often used for booleans and there is
1835 nothing useful to predict about them. */
1836 else if (integer_zerop (op0) || integer_zerop (op1))
1838 else
1839 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
1840 break;
1842 case NE_EXPR:
1843 case LTGT_EXPR:
 1844 /* Floating point comparisons appear to behave in a very
 1845 unpredictable way because of the special role of equality tests in
1846 FP code. */
1847 if (FLOAT_TYPE_P (type))
1849 /* Comparisons with 0 are often used for booleans and there is
1850 nothing useful to predict about them. */
1851 else if (integer_zerop (op0)
1852 || integer_zerop (op1))
1854 else
1855 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
1856 break;
1858 case ORDERED_EXPR:
1859 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
1860 break;
1862 case UNORDERED_EXPR:
1863 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
1864 break;
1866 case LE_EXPR:
1867 case LT_EXPR:
1868 if (integer_zerop (op1)
1869 || integer_onep (op1)
1870 || integer_all_onesp (op1)
1871 || real_zerop (op1)
1872 || real_onep (op1)
1873 || real_minus_onep (op1))
1874 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
1875 break;
1877 case GE_EXPR:
1878 case GT_EXPR:
1879 if (integer_zerop (op1)
1880 || integer_onep (op1)
1881 || integer_all_onesp (op1)
1882 || real_zerop (op1)
1883 || real_onep (op1)
1884 || real_minus_onep (op1))
1885 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
1886 break;
1888 default:
1889 break;
 1893 /* Try to guess whether the return value indicates an error code. */
1895 static enum br_predictor
1896 return_prediction (tree val, enum prediction *prediction)
1898 /* VOID. */
1899 if (!val)
1900 return PRED_NO_PREDICTION;
1901 /* Different heuristics for pointers and scalars. */
1902 if (POINTER_TYPE_P (TREE_TYPE (val)))
1904 /* NULL is usually not returned. */
1905 if (integer_zerop (val))
1907 *prediction = NOT_TAKEN;
1908 return PRED_NULL_RETURN;
1911 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
1913 /* Negative return values are often used to indicate
1914 errors. */
1915 if (TREE_CODE (val) == INTEGER_CST
1916 && tree_int_cst_sgn (val) < 0)
1918 *prediction = NOT_TAKEN;
1919 return PRED_NEGATIVE_RETURN;
 1921 /* Constant return values seem to be commonly taken.
 1922 Zero/one often represent booleans, so exclude them from the
1923 heuristics. */
1924 if (TREE_CONSTANT (val)
1925 && (!integer_zerop (val) && !integer_onep (val)))
1927 *prediction = TAKEN;
1928 return PRED_CONST_RETURN;
1931 return PRED_NO_PREDICTION;
 1934 /* Find the basic block with the return expression and look for a possible
 1935 return value, trying to apply the RETURN_PREDICTION heuristics. */
1936 static void
1937 apply_return_prediction (void)
1939 gimple return_stmt = NULL;
1940 tree return_val;
1941 edge e;
1942 gimple phi;
1943 int phi_num_args, i;
1944 enum br_predictor pred;
1945 enum prediction direction;
1946 edge_iterator ei;
1948 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1950 return_stmt = last_stmt (e->src);
1951 if (return_stmt
1952 && gimple_code (return_stmt) == GIMPLE_RETURN)
1953 break;
1955 if (!e)
1956 return;
1957 return_val = gimple_return_retval (return_stmt);
1958 if (!return_val)
1959 return;
1960 if (TREE_CODE (return_val) != SSA_NAME
1961 || !SSA_NAME_DEF_STMT (return_val)
1962 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
1963 return;
1964 phi = SSA_NAME_DEF_STMT (return_val);
1965 phi_num_args = gimple_phi_num_args (phi);
1966 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
 1968 /* Avoid the degenerate case where all return values from the function
 1969 belong to the same category (i.e. they are all positive constants),
 1970 so we can hardly say anything about them. */
1971 for (i = 1; i < phi_num_args; i++)
1972 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
1973 break;
1974 if (i != phi_num_args)
1975 for (i = 0; i < phi_num_args; i++)
1977 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
1978 if (pred != PRED_NO_PREDICTION)
1979 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
1980 direction);
 1984 /* Look for basic blocks that contain unlikely-to-happen events
 1985 (such as noreturn calls) and mark all paths leading to execution
 1986 of these basic blocks as unlikely. */
1988 static void
1989 tree_bb_level_predictions (void)
1991 basic_block bb;
1992 bool has_return_edges = false;
1993 edge e;
1994 edge_iterator ei;
1996 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1997 if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
1999 has_return_edges = true;
2000 break;
2003 apply_return_prediction ();
2005 FOR_EACH_BB (bb)
2007 gimple_stmt_iterator gsi;
2009 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2011 gimple stmt = gsi_stmt (gsi);
2012 tree decl;
2014 if (is_gimple_call (stmt))
2016 if ((gimple_call_flags (stmt) & ECF_NORETURN)
2017 && has_return_edges)
2018 predict_paths_leading_to (bb, PRED_NORETURN,
2019 NOT_TAKEN);
2020 decl = gimple_call_fndecl (stmt);
2021 if (decl
2022 && lookup_attribute ("cold",
2023 DECL_ATTRIBUTES (decl)))
2024 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2025 NOT_TAKEN);
2027 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2029 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2030 gimple_predict_outcome (stmt));
2031 /* Keep GIMPLE_PREDICT around so early inlining will propagate
2032 hints to callers. */
2038 #ifdef ENABLE_CHECKING
2040 /* Callback for pointer_map_traverse, asserts that the pointer map is
2041 empty. */
2043 static bool
2044 assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
2045 void *data ATTRIBUTE_UNUSED)
2047 gcc_assert (!*value);
2048 return false;
2050 #endif
2052 /* Predict branch probabilities and estimate profile for basic block BB. */
2054 static void
2055 tree_estimate_probability_bb (basic_block bb)
2057 edge e;
2058 edge_iterator ei;
2059 gimple last;
2061 FOR_EACH_EDGE (e, ei, bb->succs)
2063 /* Predict edges to user labels with attributes. */
2064 if (e->dest != EXIT_BLOCK_PTR)
2066 gimple_stmt_iterator gi;
2067 for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
2069 gimple stmt = gsi_stmt (gi);
2070 tree decl;
2072 if (gimple_code (stmt) != GIMPLE_LABEL)
2073 break;
2074 decl = gimple_label_label (stmt);
2075 if (DECL_ARTIFICIAL (decl))
2076 continue;
2078 /* Finally, we have a user-defined label. */
2079 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
2080 predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
2081 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
2082 predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
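/* Editorial example (hypothetical user code): GCC accepts hot/cold
   attributes on labels, e.g.

     if (rare_failure)
       goto bail;
     ...
   bail: __attribute__ ((cold));

   and the loop above then predicts the edge to the cold label as
   NOT_TAKEN via PRED_COLD_LABEL.  */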
2086 /* Predict early returns to be probable, as we've already taken
2087 care of error returns, and the other cases are often used for
2088 fast paths through the function.
2090 Since we've already removed the return statements, we are
2091 looking for a CFG like:
2093 if (conditional)
2096 goto return_block
2098 some other blocks
2099 return_block:
2100 return_stmt. */
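/* Editorial example (hypothetical user code): this CFG shape
   typically comes from source such as

     if (!arg)
       return;
     ... long fast path ...

   where the branch to the early return is predicted NOT_TAKEN below,
   unless one of the return-value heuristics already predicted it.  */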
2101 if (e->dest != bb->next_bb
2102 && e->dest != EXIT_BLOCK_PTR
2103 && single_succ_p (e->dest)
2104 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
2105 && (last = last_stmt (e->dest)) != NULL
2106 && gimple_code (last) == GIMPLE_RETURN)
2108 edge e1;
2109 edge_iterator ei1;
2111 if (single_succ_p (bb))
2113 FOR_EACH_EDGE (e1, ei1, bb->preds)
2114 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
2115 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
2116 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
2117 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2119 else
2120 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
2121 && !predicted_by_p (e->src, PRED_CONST_RETURN)
2122 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
2123 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2126 /* Look for a block we are guarding (i.e. we dominate it,
2127 but it doesn't postdominate us). */
2128 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
2129 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
2130 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
2132 gimple_stmt_iterator bi;
2134 /* The call heuristic claims that a guarded function call
2135 is improbable. This is because such calls are often used
2136 to signal exceptional situations such as printing error
2137 messages. */
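/* Editorial example (hypothetical user code): in

     if (err)
       log_failure (err);

   the guarded block contains a call with side effects, so the edge
   into it is predicted NOT_TAKEN by PRED_CALL below.  */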
2138 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
2139 gsi_next (&bi))
2141 gimple stmt = gsi_stmt (bi);
2142 if (is_gimple_call (stmt)
2143 /* Constant and pure calls are hardly ever used to signal
2144 something exceptional. */
2145 && gimple_has_side_effects (stmt))
2147 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
2148 break;
2153 tree_predict_by_opcode (bb);
2156 /* Predict branch probabilities and estimate profile of the tree CFG.
2157 This function can be called from the loop optimizers to recompute
2158 the profile information. */
2160 void
2161 tree_estimate_probability (void)
2163 basic_block bb;
2165 add_noreturn_fake_exit_edges ();
2166 connect_infinite_loops_to_exit ();
2167 /* We use loop_niter_by_eval, which requires that the loops have
2168 preheaders. */
2169 create_preheaders (CP_SIMPLE_PREHEADERS);
2170 calculate_dominance_info (CDI_POST_DOMINATORS);
2172 bb_predictions = pointer_map_create ();
2173 tree_bb_level_predictions ();
2174 record_loop_exits ();
2176 if (number_of_loops () > 1)
2177 predict_loops ();
2179 FOR_EACH_BB (bb)
2180 tree_estimate_probability_bb (bb);
2182 FOR_EACH_BB (bb)
2183 combine_predictions_for_bb (bb);
2185 #ifdef ENABLE_CHECKING
2186 pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
2187 #endif
2188 pointer_map_destroy (bb_predictions);
2189 bb_predictions = NULL;
2191 estimate_bb_frequencies ();
2192 free_dominance_info (CDI_POST_DOMINATORS);
2193 remove_fake_exit_edges ();
2196 /* Predict branch probabilities and estimate profile of the tree CFG.
2197 This is the driver function for PASS_PROFILE. */
2199 static unsigned int
2200 tree_estimate_probability_driver (void)
2202 unsigned nb_loops;
2204 loop_optimizer_init (LOOPS_NORMAL);
2205 if (dump_file && (dump_flags & TDF_DETAILS))
2206 flow_loops_dump (dump_file, NULL, 0);
2208 mark_irreducible_loops ();
2210 nb_loops = number_of_loops ();
2211 if (nb_loops > 1)
2212 scev_initialize ();
2214 tree_estimate_probability ();
2216 if (nb_loops > 1)
2217 scev_finalize ();
2219 loop_optimizer_finalize ();
2220 if (dump_file && (dump_flags & TDF_DETAILS))
2221 gimple_dump_cfg (dump_file, dump_flags);
2222 if (profile_status == PROFILE_ABSENT)
2223 profile_status = PROFILE_GUESSED;
2224 return 0;
2227 /* Predict, using predictor PRED, the edges to successors of CUR whose
2228 sources are not postdominated by BB, and recurse to all postdominators. */
2230 static void
2231 predict_paths_for_bb (basic_block cur, basic_block bb,
2232 enum br_predictor pred,
2233 enum prediction taken,
2234 bitmap visited)
2236 edge e;
2237 edge_iterator ei;
2238 basic_block son;
2240 /* We are looking for all edges forming an edge cut induced by the
2241 set of all blocks postdominated by BB. */
2242 FOR_EACH_EDGE (e, ei, cur->preds)
2243 if (e->src->index >= NUM_FIXED_BLOCKS
2244 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
2246 edge e2;
2247 edge_iterator ei2;
2248 bool found = false;
2250 /* Ignore fake and EH edges; we predict them as not taken anyway. */
2251 if (e->flags & (EDGE_EH | EDGE_FAKE))
2252 continue;
2253 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
2255 /* See if there is an edge from e->src that is not abnormal
2256 and does not lead to BB. */
2257 FOR_EACH_EDGE (e2, ei2, e->src->succs)
2258 if (e2 != e
2259 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
2260 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
2262 found = true;
2263 break;
2266 /* If there is a non-abnormal path leaving e->src, predict the edge
2267 using the predictor. Otherwise we need to look for paths
2268 leading to e->src.
2270 The second case may lead to an infinite loop when we are predicting
2271 regions that are only reachable by abnormal edges. We simply
2272 prevent visiting a given BB twice. */
2273 if (found)
2274 predict_edge_def (e, pred, taken);
2275 else if (bitmap_set_bit (visited, e->src->index))
2276 predict_paths_for_bb (e->src, e->src, pred, taken, visited);
2278 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
2279 son;
2280 son = next_dom_son (CDI_POST_DOMINATORS, son))
2281 predict_paths_for_bb (son, bb, pred, taken, visited);
2284 /* Set branch probabilities on all paths leading to BB according to
2285 predictor PRED and prediction TAKEN. */
2287 static void
2288 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
2289 enum prediction taken)
2291 bitmap visited = BITMAP_ALLOC (NULL);
2292 predict_paths_for_bb (bb, bb, pred, taken, visited);
2293 BITMAP_FREE (visited);
2296 /* Like predict_paths_leading_to but take edge instead of basic block. */
2298 static void
2299 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
2300 enum prediction taken)
2302 bool has_nonloop_edge = false;
2303 edge_iterator ei;
2304 edge e2;
2306 basic_block bb = e->src;
2307 FOR_EACH_EDGE (e2, ei, bb->succs)
2308 if (e2->dest != e->src && e2->dest != e->dest
2309 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
2310 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
2312 has_nonloop_edge = true;
2313 break;
2315 if (!has_nonloop_edge)
2317 bitmap visited = BITMAP_ALLOC (NULL);
2318 predict_paths_for_bb (bb, bb, pred, taken, visited);
2319 BITMAP_FREE (visited);
2321 else
2322 predict_edge_def (e, pred, taken);
2325 /* This is used to carry information about basic blocks. It is
2326 attached to the AUX field of the standard CFG block. */
2328 typedef struct block_info_def
2330 /* Estimated frequency of execution of basic_block. */
2331 sreal frequency;
2333 /* To keep queue of basic blocks to process. */
2334 basic_block next;
2336 /* Number of predecessors we need to visit first. */
2337 int npredecessors;
2338 } *block_info;
2340 /* Similar information for edges. */
2341 typedef struct edge_info_def
2343 /* In case the edge is a loopback edge, the probability that the edge
2344 will be reached provided that the header is. The estimated number of
2345 iterations of the loop can then be computed as 1 / (1 - back_edge_prob). */
2346 sreal back_edge_prob;
2347 /* True if the edge is a loopback edge in the natural loop. */
2348 unsigned int back_edge:1;
2349 } *edge_info;
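/* Editorial example: if the loopback edge is reached with probability
   0.9 whenever the header executes (back_edge_prob == 0.9), the
   estimated iteration count is 1 / (1 - 0.9) == 10, per the formula
   in the comment above.  */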
2351 #define BLOCK_INFO(B) ((block_info) (B)->aux)
2352 #define EDGE_INFO(E) ((edge_info) (E)->aux)
2354 /* Helper function for estimate_bb_frequencies.
2355 Propagate the frequencies in blocks marked in
2356 TOVISIT, starting in HEAD. */
2358 static void
2359 propagate_freq (basic_block head, bitmap tovisit)
2361 basic_block bb;
2362 basic_block last;
2363 unsigned i;
2364 edge e;
2365 basic_block nextbb;
2366 bitmap_iterator bi;
2368 /* For each basic block we need to visit, count the number of its
2369 predecessors that we need to visit first. */
2370 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
2372 edge_iterator ei;
2373 int count = 0;
2375 bb = BASIC_BLOCK (i);
2377 FOR_EACH_EDGE (e, ei, bb->preds)
2379 bool visit = bitmap_bit_p (tovisit, e->src->index);
2381 if (visit && !(e->flags & EDGE_DFS_BACK))
2382 count++;
2383 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
2384 fprintf (dump_file,
2385 "Irreducible region hit, ignoring edge to %i->%i\n",
2386 e->src->index, bb->index);
2388 BLOCK_INFO (bb)->npredecessors = count;
2389 /* When the function never returns, we will never process the exit block. */
2390 if (!count && bb == EXIT_BLOCK_PTR)
2391 bb->count = bb->frequency = 0;
2394 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
2395 last = head;
2396 for (bb = head; bb; bb = nextbb)
2398 edge_iterator ei;
2399 sreal cyclic_probability, frequency;
2401 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
2402 memcpy (&frequency, &real_zero, sizeof (real_zero));
2404 nextbb = BLOCK_INFO (bb)->next;
2405 BLOCK_INFO (bb)->next = NULL;
2407 /* Compute frequency of basic block. */
2408 if (bb != head)
2410 #ifdef ENABLE_CHECKING
2411 FOR_EACH_EDGE (e, ei, bb->preds)
2412 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
2413 || (e->flags & EDGE_DFS_BACK));
2414 #endif
2416 FOR_EACH_EDGE (e, ei, bb->preds)
2417 if (EDGE_INFO (e)->back_edge)
2419 sreal_add (&cyclic_probability, &cyclic_probability,
2420 &EDGE_INFO (e)->back_edge_prob);
2422 else if (!(e->flags & EDGE_DFS_BACK))
2424 sreal tmp;
2426 /* frequency += (e->probability
2427 * BLOCK_INFO (e->src)->frequency /
2428 REG_BR_PROB_BASE); */
2430 sreal_init (&tmp, e->probability, 0);
2431 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
2432 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
2433 sreal_add (&frequency, &frequency, &tmp);
2436 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
2438 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
2439 sizeof (frequency));
2441 else
2443 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
2445 memcpy (&cyclic_probability, &real_almost_one,
2446 sizeof (real_almost_one));
2449 /* BLOCK_INFO (bb)->frequency = frequency
2450 / (1 - cyclic_probability) */
2452 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
2453 sreal_div (&BLOCK_INFO (bb)->frequency,
2454 &frequency, &cyclic_probability);
2458 bitmap_clear_bit (tovisit, bb->index);
2460 e = find_edge (bb, head);
2461 if (e)
2463 sreal tmp;
2465 /* EDGE_INFO (e)->back_edge_prob
2466 = ((e->probability * BLOCK_INFO (bb)->frequency)
2467 / REG_BR_PROB_BASE); */
2469 sreal_init (&tmp, e->probability, 0);
2470 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
2471 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2472 &tmp, &real_inv_br_prob_base);
2475 /* Propagate to successor blocks. */
2476 FOR_EACH_EDGE (e, ei, bb->succs)
2477 if (!(e->flags & EDGE_DFS_BACK)
2478 && BLOCK_INFO (e->dest)->npredecessors)
2480 BLOCK_INFO (e->dest)->npredecessors--;
2481 if (!BLOCK_INFO (e->dest)->npredecessors)
2483 if (!nextbb)
2484 nextbb = e->dest;
2485 else
2486 BLOCK_INFO (last)->next = e->dest;
2488 last = e->dest;
2494 /* Estimate probabilities of loopback edges in loops at the same nesting level. */
2496 static void
2497 estimate_loops_at_level (struct loop *first_loop)
2499 struct loop *loop;
2501 for (loop = first_loop; loop; loop = loop->next)
2503 edge e;
2504 basic_block *bbs;
2505 unsigned i;
2506 bitmap tovisit = BITMAP_ALLOC (NULL);
2508 estimate_loops_at_level (loop->inner);
2510 /* Find current loop back edge and mark it. */
2511 e = loop_latch_edge (loop);
2512 EDGE_INFO (e)->back_edge = 1;
2514 bbs = get_loop_body (loop);
2515 for (i = 0; i < loop->num_nodes; i++)
2516 bitmap_set_bit (tovisit, bbs[i]->index);
2517 free (bbs);
2518 propagate_freq (loop->header, tovisit);
2519 BITMAP_FREE (tovisit);
2523 /* Propagates frequencies through the structure of loops. */
2525 static void
2526 estimate_loops (void)
2528 bitmap tovisit = BITMAP_ALLOC (NULL);
2529 basic_block bb;
2531 /* Start by estimating the frequencies in the loops. */
2532 if (number_of_loops () > 1)
2533 estimate_loops_at_level (current_loops->tree_root->inner);
2535 /* Now propagate the frequencies through all the blocks. */
2536 FOR_ALL_BB (bb)
2538 bitmap_set_bit (tovisit, bb->index);
2540 propagate_freq (ENTRY_BLOCK_PTR, tovisit);
2541 BITMAP_FREE (tovisit);
2544 /* Convert counts measured by profile-driven feedback to frequencies.
2545 Return nonzero iff there was any nonzero execution count. */
2547 int
2548 counts_to_freqs (void)
2550 gcov_type count_max, true_count_max = 0;
2551 basic_block bb;
2553 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2554 true_count_max = MAX (bb->count, true_count_max);
2556 count_max = MAX (true_count_max, 1);
2557 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2558 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
2560 return true_count_max;
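/* Editorial example (assuming BB_FREQ_MAX == 10000): a block executed
   500 times in a run whose hottest block executed 2000 times gets
   frequency (500 * 10000 + 1000) / 2000 == 2500, i.e. counts scaled
   into the 0..BB_FREQ_MAX range with rounding.  */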
2563 /* Return true if the function is likely to be expensive, so there is no
2564 point in optimizing the performance of the prologue/epilogue or doing
2565 inlining at the expense of code size growth. THRESHOLD is the limit on the
2566 average number of instructions the function may execute to still be considered inexpensive. */
2568 bool
2569 expensive_function_p (int threshold)
2571 unsigned int sum = 0;
2572 basic_block bb;
2573 unsigned int limit;
2575 /* We cannot compute this accurately for large thresholds due to scaled
2576 frequencies. */
2577 gcc_assert (threshold <= BB_FREQ_MAX);
2579 /* Frequencies are out of range. This either means that the function contains
2580 an internal loop executing more than BB_FREQ_MAX times, or that profile
2581 feedback is available and the function has not been executed at all. */
2582 if (ENTRY_BLOCK_PTR->frequency == 0)
2583 return true;
2585 /* At most BB_FREQ_MAX^2, so overflow won't happen. */
2586 limit = ENTRY_BLOCK_PTR->frequency * threshold;
2587 FOR_EACH_BB (bb)
2589 rtx insn;
2591 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
2592 insn = NEXT_INSN (insn))
2593 if (active_insn_p (insn))
2595 sum += bb->frequency;
2596 if (sum > limit)
2597 return true;
2601 return false;
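/* Editorial example: with THRESHOLD == 100 and an entry-block
   frequency of 1000, limit == 100000; the function is reported
   expensive once the sum of block frequencies over its active insns
   exceeds that, i.e. once it averages more than ~100 executed
   instructions per invocation.  */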
2604 /* Estimate basic block frequencies from the given branch probabilities. */
2606 void
2607 estimate_bb_frequencies (void)
2609 basic_block bb;
2610 sreal freq_max;
2612 if (profile_status != PROFILE_READ || !counts_to_freqs ())
2614 static int real_values_initialized = 0;
2616 if (!real_values_initialized)
2618 real_values_initialized = 1;
2619 sreal_init (&real_zero, 0, 0);
2620 sreal_init (&real_one, 1, 0);
2621 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
2622 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
2623 sreal_init (&real_one_half, 1, -1);
2624 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
2625 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
2628 mark_dfs_back_edges ();
2630 single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
2632 /* Set up block info for each basic block. */
2633 alloc_aux_for_blocks (sizeof (struct block_info_def));
2634 alloc_aux_for_edges (sizeof (struct edge_info_def));
2635 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2637 edge e;
2638 edge_iterator ei;
2640 FOR_EACH_EDGE (e, ei, bb->succs)
2642 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
2643 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2644 &EDGE_INFO (e)->back_edge_prob,
2645 &real_inv_br_prob_base);
2649 /* First compute probabilities locally for each loop from innermost
2650 to outermost to examine probabilities for back edges. */
2651 estimate_loops ();
2653 memcpy (&freq_max, &real_zero, sizeof (real_zero));
2654 FOR_EACH_BB (bb)
2655 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
2656 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
2658 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
2659 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2661 sreal tmp;
2663 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
2664 sreal_add (&tmp, &tmp, &real_one_half);
2665 bb->frequency = sreal_to_int (&tmp);
2668 free_aux_for_blocks ();
2669 free_aux_for_edges ();
2671 compute_function_frequency ();
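/* Editorial example: the final scaling above maps the most frequent
   block to BB_FREQ_MAX; if freq_max was 4.0 before the division, a
   block with relative frequency 1.0 ends up with bb->frequency ==
   BB_FREQ_MAX / 4, the added real_one_half giving round-to-nearest
   in sreal_to_int.  */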
2674 /* Decide whether the function is hot, cold, or unlikely executed. */
2675 void
2676 compute_function_frequency (void)
2678 basic_block bb;
2679 struct cgraph_node *node = cgraph_get_node (current_function_decl);
2680 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2681 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
2682 node->only_called_at_startup = true;
2683 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
2684 node->only_called_at_exit = true;
2686 if (!profile_info || !flag_branch_probabilities)
2688 int flags = flags_from_decl_or_type (current_function_decl);
2689 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
2690 != NULL)
2691 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2692 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
2693 != NULL)
2694 node->frequency = NODE_FREQUENCY_HOT;
2695 else if (flags & ECF_NORETURN)
2696 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2697 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
2698 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2699 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2700 || DECL_STATIC_DESTRUCTOR (current_function_decl))
2701 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2702 return;
2704 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2705 FOR_EACH_BB (bb)
2707 if (maybe_hot_bb_p (cfun, bb))
2709 node->frequency = NODE_FREQUENCY_HOT;
2710 return;
2712 if (!probably_never_executed_bb_p (cfun, bb))
2713 node->frequency = NODE_FREQUENCY_NORMAL;
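/* Editorial example (hypothetical user code): without profile
   feedback, a declaration such as

     __attribute__ ((cold)) void die (const char *msg);

   makes the body above classify the function as
   NODE_FREQUENCY_UNLIKELY_EXECUTED.  */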
2717 static bool
2718 gate_estimate_probability (void)
2720 return flag_guess_branch_prob;
2723 /* Build PREDICT_EXPR. */
2724 tree
2725 build_predict_expr (enum br_predictor predictor, enum prediction taken)
2727 tree t = build1 (PREDICT_EXPR, void_type_node,
2728 build_int_cst (integer_type_node, predictor));
2729 SET_PREDICT_EXPR_OUTCOME (t, taken);
2730 return t;
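/* Editorial note: this is how source-level hints reach the predictors;
   e.g. build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN) builds a
   PREDICT_EXPR that is later lowered to the GIMPLE_PREDICT statements
   consumed in tree_bb_level_predictions above.  */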
2733 const char *
2734 predictor_name (enum br_predictor predictor)
2736 return predictor_info[predictor].name;
2739 struct gimple_opt_pass pass_profile =
2742 GIMPLE_PASS,
2743 "profile_estimate", /* name */
2744 gate_estimate_probability, /* gate */
2745 tree_estimate_probability_driver, /* execute */
2746 NULL, /* sub */
2747 NULL, /* next */
2748 0, /* static_pass_number */
2749 TV_BRANCH_PROB, /* tv_id */
2750 PROP_cfg, /* properties_required */
2751 0, /* properties_provided */
2752 0, /* properties_destroyed */
2753 0, /* todo_flags_start */
2754 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2758 struct gimple_opt_pass pass_strip_predict_hints =
2761 GIMPLE_PASS,
2762 "*strip_predict_hints", /* name */
2763 NULL, /* gate */
2764 strip_predict_hints, /* execute */
2765 NULL, /* sub */
2766 NULL, /* next */
2767 0, /* static_pass_number */
2768 TV_BRANCH_PROB, /* tv_id */
2769 PROP_cfg, /* properties_required */
2770 0, /* properties_provided */
2771 0, /* properties_destroyed */
2772 0, /* todo_flags_start */
2773 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2777 /* Rebuild function frequencies. Passes are in general expected to
2778 maintain the profile by hand; however, in some cases this is not possible:
2779 for example, when inlining several functions with loops, frequencies might
2780 run out of scale and thus need to be recomputed. */
2782 void
2783 rebuild_frequencies (void)
2785 timevar_push (TV_REBUILD_FREQUENCIES);
2786 if (profile_status == PROFILE_GUESSED)
2788 loop_optimizer_init (0);
2789 add_noreturn_fake_exit_edges ();
2790 mark_irreducible_loops ();
2791 connect_infinite_loops_to_exit ();
2792 estimate_bb_frequencies ();
2793 remove_fake_exit_edges ();
2794 loop_optimizer_finalize ();
2796 else if (profile_status == PROFILE_READ)
2797 counts_to_freqs ();
2798 else
2799 gcc_unreachable ();
2800 timevar_pop (TV_REBUILD_FREQUENCIES);