1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* References:
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "tree.h"
36 #include "rtl.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "function.h"
44 #include "except.h"
45 #include "diagnostic-core.h"
46 #include "recog.h"
47 #include "expr.h"
48 #include "predict.h"
49 #include "coverage.h"
50 #include "sreal.h"
51 #include "params.h"
52 #include "target.h"
53 #include "cfgloop.h"
54 #include "tree-flow.h"
55 #include "ggc.h"
56 #include "tree-pass.h"
57 #include "tree-scalar-evolution.h"
58 #include "cfgloop.h"
59 #include "pointer-set.h"
61 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
62 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
63 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
64 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
66 /* Random guesstimates given names.
67 PROB_VERY_UNLIKELY should be small enough so that a basic block predicted
68 by it gets below HOT_BB_FREQUENCY_FRACTION. */
69 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
70 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
71 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
72 #define PROB_ALWAYS (REG_BR_PROB_BASE)
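/* For illustration: with the usual REG_BR_PROB_BASE of 10000, these work
   out to PROB_VERY_UNLIKELY == 4 (0.04%), PROB_EVEN == 5000 (50%),
   PROB_VERY_LIKELY == 9996 (99.96%) and PROB_ALWAYS == 10000 (100%).  */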
74 static void combine_predictions_for_insn (rtx, basic_block);
75 static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
76 static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
77 static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
78 static bool can_predict_insn_p (const_rtx);
80 /* Information we hold about each branch predictor.
81 Filled using information from predict.def. */
83 struct predictor_info
85 const char *const name; /* Name used in the debugging dumps. */
86 const int hitrate; /* Expected hitrate used by
87 predict_insn_def call. */
88 const int flags;
91 /* Use the given predictor without Dempster-Shafer theory if it matches
92 using the first_match heuristics. */
93 #define PRED_FLAG_FIRST_MATCH 1
95 /* Convert a hitrate given in percent to our representation. */
97 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
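/* For example, HITRATE (50) is (50 * 10000 + 50) / 100 == 5000 when
   REG_BR_PROB_BASE is 10000, and HITRATE (99) is 9900; the "+ 50" term
   makes the division round to the nearest unit rather than truncate.  */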
99 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
100 static const struct predictor_info predictor_info[]= {
101 #include "predict.def"
103 /* Upper bound on predictors. */
104 {NULL, 0, 0}
106 #undef DEF_PREDICTOR
108 /* Return TRUE if frequency FREQ is considered to be hot. */
110 static inline bool
111 maybe_hot_frequency_p (struct function *fun, int freq)
113 struct cgraph_node *node = cgraph_get_node (fun->decl);
114 if (!profile_info || !flag_branch_probabilities)
116 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
117 return false;
118 if (node->frequency == NODE_FREQUENCY_HOT)
119 return true;
121 if (profile_status_for_function (fun) == PROFILE_ABSENT)
122 return true;
123 if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
124 && freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency * 2 / 3))
125 return false;
126 if (freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency
127 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
128 return false;
129 return true;
132 /* Return TRUE if profile count COUNT is considered to be hot. */
134 static inline bool
135 maybe_hot_count_p (struct function *fun, gcov_type count)
137 if (profile_status_for_function (fun) != PROFILE_READ)
138 return true;
139 /* Code executed at most once is not hot. */
140 if (profile_info->runs >= count)
141 return false;
142 return (count
143 > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
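/* E.g. assuming the default HOT_BB_COUNT_FRACTION of 10000, a count is
   considered hot only when it exceeds both the number of train runs and
   one ten-thousandth of the largest count in the profile.  */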
146 /* Return true in case BB can be CPU intensive and should be optimized
147 for maximal performance. */
149 bool
150 maybe_hot_bb_p (struct function *fun, const_basic_block bb)
152 gcc_checking_assert (fun);
153 if (profile_status_for_function (fun) == PROFILE_READ)
154 return maybe_hot_count_p (fun, bb->count);
155 return maybe_hot_frequency_p (fun, bb->frequency);
158 /* Return true if the call can be hot. */
160 bool
161 cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
163 if (profile_info && flag_branch_probabilities
164 && (edge->count
165 <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
166 return false;
167 if (edge->caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
168 || (edge->callee
169 && edge->callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
170 return false;
171 if (edge->caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
172 && (edge->callee
173 && edge->callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
174 return false;
175 if (optimize_size)
176 return false;
177 if (edge->caller->frequency == NODE_FREQUENCY_HOT)
178 return true;
179 if (edge->caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
180 && edge->frequency < CGRAPH_FREQ_BASE * 3 / 2)
181 return false;
182 if (flag_guess_branch_prob
183 && edge->frequency <= (CGRAPH_FREQ_BASE
184 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
185 return false;
186 return true;
189 /* Return true in case edge E can be CPU intensive and should be optimized
190 for maximal performance. */
192 bool
193 maybe_hot_edge_p (edge e)
195 if (profile_status == PROFILE_READ)
196 return maybe_hot_count_p (cfun, e->count);
197 return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
201 /* Return true in case BB is probably never executed. */
203 bool
204 probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
206 gcc_checking_assert (fun);
207 if (profile_info && flag_branch_probabilities)
208 return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
209 if ((!profile_info || !flag_branch_probabilities)
210 && (cgraph_get_node (fun->decl)->frequency
211 == NODE_FREQUENCY_UNLIKELY_EXECUTED))
212 return true;
213 return false;
216 /* Return true if NODE should be optimized for size. */
218 bool
219 cgraph_optimize_for_size_p (struct cgraph_node *node)
221 if (optimize_size)
222 return true;
223 if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
224 return true;
225 else
226 return false;
229 /* Return true when current function should always be optimized for size. */
231 bool
232 optimize_function_for_size_p (struct function *fun)
234 if (optimize_size)
235 return true;
236 if (!fun || !fun->decl)
237 return false;
238 return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
241 /* Return true when current function should always be optimized for speed. */
243 bool
244 optimize_function_for_speed_p (struct function *fun)
246 return !optimize_function_for_size_p (fun);
249 /* Return TRUE when BB should be optimized for size. */
251 bool
252 optimize_bb_for_size_p (const_basic_block bb)
254 return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (cfun, bb);
257 /* Return TRUE when BB should be optimized for speed. */
259 bool
260 optimize_bb_for_speed_p (const_basic_block bb)
262 return !optimize_bb_for_size_p (bb);
265 /* Return TRUE when edge E should be optimized for size. */
267 bool
268 optimize_edge_for_size_p (edge e)
270 return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
273 /* Return TRUE when edge E should be optimized for speed. */
275 bool
276 optimize_edge_for_speed_p (edge e)
278 return !optimize_edge_for_size_p (e);
281 /* Return TRUE when the current instruction should be optimized for size. */
283 bool
284 optimize_insn_for_size_p (void)
286 return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
289 /* Return TRUE when the current instruction should be optimized for speed. */
291 bool
292 optimize_insn_for_speed_p (void)
294 return !optimize_insn_for_size_p ();
297 /* Return TRUE when LOOP should be optimized for size. */
299 bool
300 optimize_loop_for_size_p (struct loop *loop)
302 return optimize_bb_for_size_p (loop->header);
305 /* Return TRUE when LOOP should be optimized for speed. */
307 bool
308 optimize_loop_for_speed_p (struct loop *loop)
310 return optimize_bb_for_speed_p (loop->header);
313 /* Return TRUE when LOOP nest should be optimized for speed. */
315 bool
316 optimize_loop_nest_for_speed_p (struct loop *loop)
318 struct loop *l = loop;
319 if (optimize_loop_for_speed_p (loop))
320 return true;
321 l = loop->inner;
322 while (l && l != loop)
324 if (optimize_loop_for_speed_p (l))
325 return true;
326 if (l->inner)
327 l = l->inner;
328 else if (l->next)
329 l = l->next;
330 else
332 while (l != loop && !l->next)
333 l = loop_outer (l);
334 if (l != loop)
335 l = l->next;
338 return false;
341 /* Return TRUE when LOOP nest should be optimized for size. */
343 bool
344 optimize_loop_nest_for_size_p (struct loop *loop)
346 return !optimize_loop_nest_for_speed_p (loop);
349 /* Return true when edge E is likely to be well predictable by branch
350 predictor. */
352 bool
353 predictable_edge_p (edge e)
355 if (profile_status == PROFILE_ABSENT)
356 return false;
357 if ((e->probability
358 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
359 || (REG_BR_PROB_BASE - e->probability
360 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
361 return true;
362 return false;
366 /* Set the RTL expansion profile to that of basic block BB. */
368 void
369 rtl_profile_for_bb (basic_block bb)
371 crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
374 /* Set the RTL expansion profile to that of edge E. */
376 void
377 rtl_profile_for_edge (edge e)
379 crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
382 /* Set RTL expansion to default mode (i.e. when profile info is not known). */
383 void
384 default_rtl_profile (void)
386 crtl->maybe_hot_insn_p = true;
389 /* Return true if one of the outgoing edges is already predicted by
390 PREDICTOR. */
392 bool
393 rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
395 rtx note;
396 if (!INSN_P (BB_END (bb)))
397 return false;
398 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
399 if (REG_NOTE_KIND (note) == REG_BR_PRED
400 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
401 return true;
402 return false;
405 /* This map contains, for each basic block, the list of predictions for its
406 outgoing edges. */
408 static struct pointer_map_t *bb_predictions;
410 /* Structure representing predictions at the tree level. */
412 struct edge_prediction {
413 struct edge_prediction *ep_next;
414 edge ep_edge;
415 enum br_predictor ep_predictor;
416 int ep_probability;
419 /* Return true if one of the outgoing edges is already predicted by
420 PREDICTOR. */
422 bool
423 gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
425 struct edge_prediction *i;
426 void **preds = pointer_map_contains (bb_predictions, bb);
428 if (!preds)
429 return false;
431 for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
432 if (i->ep_predictor == predictor)
433 return true;
434 return false;
437 /* Return true when the probability of edge E is reliable.
439 The profile guessing code is good at predicting the branch outcome (i.e.
440 taken/not taken); it is right slightly over 75% of the time.
441 It is, however, notoriously poor at predicting the probability itself.
442 In general the guessed profile appears a lot flatter (with probabilities
443 closer to 50%) than reality, so it is a bad idea to use it to drive
444 optimizations such as those disabling dynamic branch prediction for well
445 predictable branches.
447 There are two exceptions - edges leading to noreturn blocks and edges
448 predicted by the number-of-iterations heuristics are predicted well. This
449 predicate should be able to distinguish those, but at the moment it simply
450 checks for the noreturn heuristic, which is the only one giving probabilities
451 over 99% or below 1%. In the future we might want to propagate reliability
452 information across the CFG if we find it useful in multiple places. */
453 static bool
454 probability_reliable_p (int prob)
456 return (profile_status == PROFILE_READ
457 || (profile_status == PROFILE_GUESSED
458 && (prob <= HITRATE (1) || prob >= HITRATE (99))));
461 /* Same predicate as above, working on edges. */
462 bool
463 edge_probability_reliable_p (const_edge e)
465 return probability_reliable_p (e->probability);
468 /* Same predicate as edge_probability_reliable_p, working on notes. */
469 bool
470 br_prob_note_reliable_p (const_rtx note)
472 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
473 return probability_reliable_p (INTVAL (XEXP (note, 0)));
476 static void
477 predict_insn (rtx insn, enum br_predictor predictor, int probability)
479 gcc_assert (any_condjump_p (insn));
480 if (!flag_guess_branch_prob)
481 return;
483 add_reg_note (insn, REG_BR_PRED,
484 gen_rtx_CONCAT (VOIDmode,
485 GEN_INT ((int) predictor),
486 GEN_INT ((int) probability)));
489 /* Predict insn by given predictor. */
491 void
492 predict_insn_def (rtx insn, enum br_predictor predictor,
493 enum prediction taken)
495 int probability = predictor_info[(int) predictor].hitrate;
497 if (taken != TAKEN)
498 probability = REG_BR_PROB_BASE - probability;
500 predict_insn (insn, predictor, probability);
503 /* Predict edge E with given probability if possible. */
505 void
506 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
508 rtx last_insn;
509 last_insn = BB_END (e->src);
511 /* We can store the branch prediction information only about
512 conditional jumps. */
513 if (!any_condjump_p (last_insn))
514 return;
516 /* We always store probability of branching. */
517 if (e->flags & EDGE_FALLTHRU)
518 probability = REG_BR_PROB_BASE - probability;
520 predict_insn (last_insn, predictor, probability);
523 /* Predict edge E with the given PROBABILITY. */
524 void
525 gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
527 gcc_assert (profile_status != PROFILE_GUESSED);
528 if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
529 && flag_guess_branch_prob && optimize)
531 struct edge_prediction *i = XNEW (struct edge_prediction);
532 void **preds = pointer_map_insert (bb_predictions, e->src);
534 i->ep_next = (struct edge_prediction *) *preds;
535 *preds = i;
536 i->ep_probability = probability;
537 i->ep_predictor = predictor;
538 i->ep_edge = e;
542 /* Remove all predictions on given basic block that are attached
543 to edge E. */
544 void
545 remove_predictions_associated_with_edge (edge e)
547 void **preds;
549 if (!bb_predictions)
550 return;
552 preds = pointer_map_contains (bb_predictions, e->src);
554 if (preds)
556 struct edge_prediction **prediction = (struct edge_prediction **) preds;
557 struct edge_prediction *next;
559 while (*prediction)
561 if ((*prediction)->ep_edge == e)
563 next = (*prediction)->ep_next;
564 free (*prediction);
565 *prediction = next;
567 else
568 prediction = &((*prediction)->ep_next);
573 /* Clears the list of predictions stored for BB. */
575 static void
576 clear_bb_predictions (basic_block bb)
578 void **preds = pointer_map_contains (bb_predictions, bb);
579 struct edge_prediction *pred, *next;
581 if (!preds)
582 return;
584 for (pred = (struct edge_prediction *) *preds; pred; pred = next)
586 next = pred->ep_next;
587 free (pred);
589 *preds = NULL;
592 /* Return true when we can store prediction on insn INSN.
593 At the moment we represent predictions only on conditional
594 jumps, not on computed jumps or other complicated cases. */
595 static bool
596 can_predict_insn_p (const_rtx insn)
598 return (JUMP_P (insn)
599 && any_condjump_p (insn)
600 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
603 /* Predict edge E by given predictor if possible. */
605 void
606 predict_edge_def (edge e, enum br_predictor predictor,
607 enum prediction taken)
609 int probability = predictor_info[(int) predictor].hitrate;
611 if (taken != TAKEN)
612 probability = REG_BR_PROB_BASE - probability;
614 predict_edge (e, predictor, probability);
617 /* Invert all branch predictions or probability notes in the INSN. This needs
618 to be done each time we invert the condition used by the jump. */
620 void
621 invert_br_probabilities (rtx insn)
623 rtx note;
625 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
626 if (REG_NOTE_KIND (note) == REG_BR_PROB)
627 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
628 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
629 XEXP (XEXP (note, 0), 1)
630 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
633 /* Dump information about the branch prediction to the output file. */
635 static void
636 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
637 basic_block bb, int used)
639 edge e;
640 edge_iterator ei;
642 if (!file)
643 return;
645 FOR_EACH_EDGE (e, ei, bb->succs)
646 if (! (e->flags & EDGE_FALLTHRU))
647 break;
649 fprintf (file, " %s heuristics%s: %.1f%%",
650 predictor_info[predictor].name,
651 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
653 if (bb->count)
655 fprintf (file, " exec ");
656 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
657 if (e)
659 fprintf (file, " hit ");
660 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
661 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
665 fprintf (file, "\n");
668 /* We cannot predict the probabilities of the outgoing edges of BB. Set them
669 evenly and hope for the best. */
670 static void
671 set_even_probabilities (basic_block bb)
673 int nedges = 0;
674 edge e;
675 edge_iterator ei;
677 FOR_EACH_EDGE (e, ei, bb->succs)
678 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
679 nedges ++;
680 FOR_EACH_EDGE (e, ei, bb->succs)
681 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
682 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
683 else
684 e->probability = 0;
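/* For instance, a block with three viable successors gets
   (10000 + 1) / 3 == 3333 on each of them when REG_BR_PROB_BASE is
   10000; EH and fake edges are given probability 0.  */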
687 /* Combine all REG_BR_PRED notes into a single probability and attach a
688 REG_BR_PROB note if not already present. Remove the now useless REG_BR_PRED notes. */
690 static void
691 combine_predictions_for_insn (rtx insn, basic_block bb)
693 rtx prob_note;
694 rtx *pnote;
695 rtx note;
696 int best_probability = PROB_EVEN;
697 enum br_predictor best_predictor = END_PREDICTORS;
698 int combined_probability = REG_BR_PROB_BASE / 2;
699 int d;
700 bool first_match = false;
701 bool found = false;
703 if (!can_predict_insn_p (insn))
705 set_even_probabilities (bb);
706 return;
709 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
710 pnote = &REG_NOTES (insn);
711 if (dump_file)
712 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
713 bb->index);
715 /* We implement "first match" heuristics and use the probability guessed
716 by the predictor with the smallest index. */
717 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
718 if (REG_NOTE_KIND (note) == REG_BR_PRED)
720 enum br_predictor predictor = ((enum br_predictor)
721 INTVAL (XEXP (XEXP (note, 0), 0)));
722 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
724 found = true;
725 if (best_predictor > predictor)
726 best_probability = probability, best_predictor = predictor;
728 d = (combined_probability * probability
729 + (REG_BR_PROB_BASE - combined_probability)
730 * (REG_BR_PROB_BASE - probability));
732 /* Use FP math to avoid overflows of 32-bit integers. */
733 if (d == 0)
734 /* If one probability is 0% and one 100%, avoid division by zero. */
735 combined_probability = REG_BR_PROB_BASE / 2;
736 else
737 combined_probability = (((double) combined_probability) * probability
738 * REG_BR_PROB_BASE / d + 0.5);
741 /* Decide which heuristic to use. In case we didn't match anything,
742 use the no_prediction heuristic; in case we did match, use either
743 first match or Dempster-Shafer theory depending on the flags. */
745 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
746 first_match = true;
748 if (!found)
749 dump_prediction (dump_file, PRED_NO_PREDICTION,
750 combined_probability, bb, true);
751 else
753 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
754 bb, !first_match);
755 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
756 bb, first_match);
759 if (first_match)
760 combined_probability = best_probability;
761 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
763 while (*pnote)
765 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
767 enum br_predictor predictor = ((enum br_predictor)
768 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
769 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
771 dump_prediction (dump_file, predictor, probability, bb,
772 !first_match || best_predictor == predictor);
773 *pnote = XEXP (*pnote, 1);
775 else
776 pnote = &XEXP (*pnote, 1);
779 if (!prob_note)
781 add_reg_note (insn, REG_BR_PROB, GEN_INT (combined_probability));
783 /* Save the prediction into the CFG in case we are seeing a non-degenerate
784 conditional jump. */
785 if (!single_succ_p (bb))
787 BRANCH_EDGE (bb)->probability = combined_probability;
788 FALLTHRU_EDGE (bb)->probability
789 = REG_BR_PROB_BASE - combined_probability;
792 else if (!single_succ_p (bb))
794 int prob = INTVAL (XEXP (prob_note, 0));
796 BRANCH_EDGE (bb)->probability = prob;
797 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
799 else
800 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
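/* A minimal standalone sketch (ours, not part of GCC proper) of the
   Dempster-Shafer combination performed above: merge two independent
   probabilities P1 and P2, both scaled to REG_BR_PROB_BASE.  Two agreeing
   predictors reinforce each other; e.g. combining 7000 and 6000
   (70% and 60%) yields about 7778.  */

static inline int
ds_combine_example (int p1, int p2)
{
  /* Evidence that both agree on "taken" versus both on "not taken".  */
  double d = (double) p1 * p2
	     + (double) (REG_BR_PROB_BASE - p1) * (REG_BR_PROB_BASE - p2);

  /* One predictor saying 0% and the other 100% carries no information.  */
  if (d == 0)
    return REG_BR_PROB_BASE / 2;
  return (int) ((double) p1 * p2 * REG_BR_PROB_BASE / d + 0.5);
}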
803 /* Combine predictions into a single probability and store it into the CFG.
804 Remove the now useless prediction entries. */
806 static void
807 combine_predictions_for_bb (basic_block bb)
809 int best_probability = PROB_EVEN;
810 enum br_predictor best_predictor = END_PREDICTORS;
811 int combined_probability = REG_BR_PROB_BASE / 2;
812 int d;
813 bool first_match = false;
814 bool found = false;
815 struct edge_prediction *pred;
816 int nedges = 0;
817 edge e, first = NULL, second = NULL;
818 edge_iterator ei;
819 void **preds;
821 FOR_EACH_EDGE (e, ei, bb->succs)
822 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
824 nedges ++;
825 if (first && !second)
826 second = e;
827 if (!first)
828 first = e;
831 /* When there is no successor or only one choice, prediction is easy.
833 We are lazy for now and predict only basic blocks with two outgoing
834 edges. It is possible to predict the generic case too, but we would have
835 to ignore the first match heuristics and do more involved combining.
836 Implement this later. */
837 if (nedges != 2)
839 if (!bb->count)
840 set_even_probabilities (bb);
841 clear_bb_predictions (bb);
842 if (dump_file)
843 fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
844 nedges, bb->index);
845 return;
848 if (dump_file)
849 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
851 preds = pointer_map_contains (bb_predictions, bb);
852 if (preds)
854 /* We implement "first match" heuristics and use the probability guessed
855 by the predictor with the smallest index. */
856 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
858 enum br_predictor predictor = pred->ep_predictor;
859 int probability = pred->ep_probability;
861 if (pred->ep_edge != first)
862 probability = REG_BR_PROB_BASE - probability;
864 found = true;
865 /* First match heuristics would be wildly confused if we predicted
866 both directions. */
867 if (best_predictor > predictor)
869 struct edge_prediction *pred2;
870 int prob = probability;
872 for (pred2 = (struct edge_prediction *) *preds; pred2; pred2 = pred2->ep_next)
873 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
875 int probability2 = pred2->ep_probability;
877 if (pred2->ep_edge != first)
878 probability2 = REG_BR_PROB_BASE - probability2;
880 if ((probability < REG_BR_PROB_BASE / 2) !=
881 (probability2 < REG_BR_PROB_BASE / 2))
882 break;
884 /* If the same predictor later gave a better result, go for it! */
885 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
886 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
887 prob = probability2;
889 if (!pred2)
890 best_probability = prob, best_predictor = predictor;
893 d = (combined_probability * probability
894 + (REG_BR_PROB_BASE - combined_probability)
895 * (REG_BR_PROB_BASE - probability));
897 /* Use FP math to avoid overflows of 32-bit integers. */
898 if (d == 0)
899 /* If one probability is 0% and one 100%, avoid division by zero. */
900 combined_probability = REG_BR_PROB_BASE / 2;
901 else
902 combined_probability = (((double) combined_probability)
903 * probability
904 * REG_BR_PROB_BASE / d + 0.5);
908 /* Decide which heuristic to use. In case we didn't match anything,
909 use the no_prediction heuristic; in case we did match, use either
910 first match or Dempster-Shafer theory depending on the flags. */
912 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
913 first_match = true;
915 if (!found)
916 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
917 else
919 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
920 !first_match);
921 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
922 first_match);
925 if (first_match)
926 combined_probability = best_probability;
927 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
929 if (preds)
931 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
933 enum br_predictor predictor = pred->ep_predictor;
934 int probability = pred->ep_probability;
936 if (pred->ep_edge != EDGE_SUCC (bb, 0))
937 probability = REG_BR_PROB_BASE - probability;
938 dump_prediction (dump_file, predictor, probability, bb,
939 !first_match || best_predictor == predictor);
942 clear_bb_predictions (bb);
944 if (!bb->count)
946 first->probability = combined_probability;
947 second->probability = REG_BR_PROB_BASE - combined_probability;
951 /* Check if T1 and T2 satisfy the IV_COMPARE condition.
952 Return the SSA_NAME if the condition is satisfied, NULL otherwise.
954 T1 and T2 should be one of the following cases:
955 1. T1 is SSA_NAME, T2 is NULL
956 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
957 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
959 static tree
960 strips_small_constant (tree t1, tree t2)
962 tree ret = NULL;
963 int value = 0;
965 if (!t1)
966 return NULL;
967 else if (TREE_CODE (t1) == SSA_NAME)
968 ret = t1;
969 else if (host_integerp (t1, 0))
970 value = tree_low_cst (t1, 0);
971 else
972 return NULL;
974 if (!t2)
975 return ret;
976 else if (host_integerp (t2, 0))
977 value = tree_low_cst (t2, 0);
978 else if (TREE_CODE (t2) == SSA_NAME)
980 if (ret)
981 return NULL;
982 else
983 ret = t2;
986 if (value <= 4 && value >= -4)
987 return ret;
988 else
989 return NULL;
992 /* Return the SSA_NAME in T or T's operands.
993 Return NULL if no SSA_NAME can be found. */
995 static tree
996 get_base_value (tree t)
998 if (TREE_CODE (t) == SSA_NAME)
999 return t;
1001 if (!BINARY_CLASS_P (t))
1002 return NULL;
1004 switch (TREE_OPERAND_LENGTH (t))
1006 case 1:
1007 return strips_small_constant (TREE_OPERAND (t, 0), NULL);
1008 case 2:
1009 return strips_small_constant (TREE_OPERAND (t, 0),
1010 TREE_OPERAND (t, 1));
1011 default:
1012 return NULL;
1016 /* Check the compare STMT in LOOP. If it compares an induction
1017 variable to a loop invariant, return true, and save
1018 LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
1019 Otherwise return false and set LOOP_INVARIANT to NULL. */
1021 static bool
1022 is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
1023 tree *loop_invariant,
1024 enum tree_code *compare_code,
1025 int *loop_step,
1026 tree *loop_iv_base)
1028 tree op0, op1, bound, base;
1029 affine_iv iv0, iv1;
1030 enum tree_code code;
1031 int step;
1033 code = gimple_cond_code (stmt);
1034 *loop_invariant = NULL;
1036 switch (code)
1038 case GT_EXPR:
1039 case GE_EXPR:
1040 case NE_EXPR:
1041 case LT_EXPR:
1042 case LE_EXPR:
1043 case EQ_EXPR:
1044 break;
1046 default:
1047 return false;
1050 op0 = gimple_cond_lhs (stmt);
1051 op1 = gimple_cond_rhs (stmt);
1053 if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
1054 || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
1055 return false;
1056 if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
1057 return false;
1058 if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
1059 return false;
1060 if (TREE_CODE (iv0.step) != INTEGER_CST
1061 || TREE_CODE (iv1.step) != INTEGER_CST)
1062 return false;
1063 if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
1064 || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
1065 return false;
1067 if (integer_zerop (iv0.step))
1069 if (code != NE_EXPR && code != EQ_EXPR)
1070 code = invert_tree_comparison (code, false);
1071 bound = iv0.base;
1072 base = iv1.base;
1073 if (host_integerp (iv1.step, 0))
1074 step = tree_low_cst (iv1.step, 0);
1075 else
1076 return false;
1078 else
1080 bound = iv1.base;
1081 base = iv0.base;
1082 if (host_integerp (iv0.step, 0))
1083 step = tree_low_cst (iv0.step, 0);
1084 else
1085 return false;
1088 if (TREE_CODE (bound) != INTEGER_CST)
1089 bound = get_base_value (bound);
1090 if (!bound)
1091 return false;
1092 if (TREE_CODE (base) != INTEGER_CST)
1093 base = get_base_value (base);
1094 if (!base)
1095 return false;
1097 *loop_invariant = bound;
1098 *compare_code = code;
1099 *loop_step = step;
1100 *loop_iv_base = base;
1101 return true;
1104 /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
1106 static bool
1107 expr_coherent_p (tree t1, tree t2)
1109 gimple stmt;
1110 tree ssa_name_1 = NULL;
1111 tree ssa_name_2 = NULL;
1113 gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
1114 gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);
1116 if (t1 == t2)
1117 return true;
1119 if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
1120 return true;
1121 if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
1122 return false;
1124 /* Check to see if t1 is expressed/defined with t2. */
1125 stmt = SSA_NAME_DEF_STMT (t1);
1126 gcc_assert (stmt != NULL);
1127 if (is_gimple_assign (stmt))
1129 ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1130 if (ssa_name_1 && ssa_name_1 == t2)
1131 return true;
1134 /* Check to see if t2 is expressed/defined with t1. */
1135 stmt = SSA_NAME_DEF_STMT (t2);
1136 gcc_assert (stmt != NULL);
1137 if (is_gimple_assign (stmt))
1139 ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1140 if (ssa_name_2 && ssa_name_2 == t1)
1141 return true;
1144 /* Compare if t1 and t2's def_stmts are identical. */
1145 if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
1146 return true;
1147 else
1148 return false;
1151 /* Predict branch probability of BB when BB contains a branch that compares
1152 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1153 loop exit is compared using LOOP_BOUND_CODE, with a step of LOOP_BOUND_STEP.
1155 E.g.
1156 for (int i = 0; i < bound; i++) {
1157 if (i < bound - 2)
1158 computation_1();
1159 else
1160 computation_2();
1163 In this loop, we will predict the branch inside the loop to be taken. */
1165 static void
1166 predict_iv_comparison (struct loop *loop, basic_block bb,
1167 tree loop_bound_var,
1168 tree loop_iv_base_var,
1169 enum tree_code loop_bound_code,
1170 int loop_bound_step)
1172 gimple stmt;
1173 tree compare_var, compare_base;
1174 enum tree_code compare_code;
1175 int compare_step;
1176 edge then_edge;
1177 edge_iterator ei;
1179 if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1180 || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
1181 || predicted_by_p (bb, PRED_LOOP_EXIT))
1182 return;
1184 stmt = last_stmt (bb);
1185 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1186 return;
1187 if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
1188 &compare_code,
1189 &compare_step,
1190 &compare_base))
1191 return;
1193 /* Find the taken edge. */
1194 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1195 if (then_edge->flags & EDGE_TRUE_VALUE)
1196 break;
1198 /* When comparing an IV to a loop invariant, NE is more likely to be
1199 taken while EQ is more likely to be not-taken. */
1200 if (compare_code == NE_EXPR)
1202 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1203 return;
1205 else if (compare_code == EQ_EXPR)
1207 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1208 return;
1211 if (!expr_coherent_p (loop_iv_base_var, compare_base))
1212 return;
1214 /* If loop bound, base and compare bound are all constants, we can
1215 calculate the probability directly. */
1216 if (host_integerp (loop_bound_var, 0)
1217 && host_integerp (compare_var, 0)
1218 && host_integerp (compare_base, 0))
1220 int probability;
1221 HOST_WIDE_INT compare_count;
1222 HOST_WIDE_INT loop_bound = tree_low_cst (loop_bound_var, 0);
1223 HOST_WIDE_INT compare_bound = tree_low_cst (compare_var, 0);
1224 HOST_WIDE_INT base = tree_low_cst (compare_base, 0);
1225 HOST_WIDE_INT loop_count = (loop_bound - base) / compare_step;
1227 if ((compare_step > 0)
1228 ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
1229 compare_count = (loop_bound - compare_bound) / compare_step;
1230 else
1231 compare_count = (compare_bound - base) / compare_step;
1233 if (compare_code == LE_EXPR || compare_code == GE_EXPR)
1234 compare_count ++;
1235 if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
1236 loop_count ++;
1237 if (compare_count < 0)
1238 compare_count = 0;
1239 if (loop_count < 0)
1240 loop_count = 0;
1242 if (loop_count == 0)
1243 probability = 0;
1244 else if (compare_count > loop_count)
1245 probability = REG_BR_PROB_BASE;
1246 else
1247 probability = (double) REG_BR_PROB_BASE * compare_count / loop_count;
1248 predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
1249 return;
1252 if (expr_coherent_p (loop_bound_var, compare_var))
1254 if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
1255 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1256 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1257 else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
1258 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1259 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1260 else if (loop_bound_code == NE_EXPR)
1262 /* If the loop backedge condition is "(i != bound)", we do
1263 the comparison based on the step of IV:
1264 * step < 0 : backedge condition is like (i > bound)
1265 * step > 0 : backedge condition is like (i < bound) */
1266 gcc_assert (loop_bound_step != 0);
1267 if (loop_bound_step > 0
1268 && (compare_code == LT_EXPR
1269 || compare_code == LE_EXPR))
1270 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1271 else if (loop_bound_step < 0
1272 && (compare_code == GT_EXPR
1273 || compare_code == GE_EXPR))
1274 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1275 else
1276 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1278 else
1279 /* The branch is predicted not-taken if loop_bound_code is
1280 the opposite of compare_code. */
1281 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1283 else if (expr_coherent_p (loop_iv_base_var, compare_var))
1285 /* For cases like:
1286 for (i = s; i < h; i++)
1287 if (i > s + 2) ....
1288 The branch should be predicted taken. */
1289 if (loop_bound_step > 0
1290 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1291 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1292 else if (loop_bound_step < 0
1293 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1294 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1295 else
1296 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
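/* A concrete instance of the constant case above: for
     for (i = 0; i < 100; i++)
       if (i < 98)
         ...
   loop_count is 100 and compare_count is 98, so the then_edge is
   predicted taken with probability 98/100 of REG_BR_PROB_BASE.  */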
1300 /* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop
1301 exits result from short-circuit conditions that will generate an
1302 if_tmp. E.g.:
1304 if (foo() || global > 10)
1305 break;
1307 This will be translated into:
1309 BB3:
1310 loop header...
1311 BB4:
1312 if foo() goto BB6 else goto BB5
1313 BB5:
1314 if global > 10 goto BB6 else goto BB7
1315 BB6:
1316 goto BB7
1317 BB7:
1318 iftmp = (PHI 0(BB5), 1(BB6))
1319 if iftmp == 1 goto BB8 else goto BB3
1320 BB8:
1321 outside of the loop...
1323 The edge BB7->BB8 is a loop exit because BB8 is outside of the loop.
1324 From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
1325 exits. This function takes BB7->BB8 as input, finds the extra loop
1326 exits and predicts them using PRED_LOOP_EXIT. */
1328 static void
1329 predict_extra_loop_exits (edge exit_edge)
1331 unsigned i;
1332 bool check_value_one;
1333 gimple phi_stmt;
1334 tree cmp_rhs, cmp_lhs;
1335 gimple cmp_stmt = last_stmt (exit_edge->src);
1337 if (!cmp_stmt || gimple_code (cmp_stmt) != GIMPLE_COND)
1338 return;
1339 cmp_rhs = gimple_cond_rhs (cmp_stmt);
1340 cmp_lhs = gimple_cond_lhs (cmp_stmt);
1341 if (!TREE_CONSTANT (cmp_rhs)
1342 || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
1343 return;
1344 if (TREE_CODE (cmp_lhs) != SSA_NAME)
1345 return;
1347 /* If check_value_one is true, only the phi_args with value '1' will lead
1348 to loop exit. Otherwise, only the phi_args with value '0' will lead to
1349 loop exit. */
1350 check_value_one = (((integer_onep (cmp_rhs))
1351 ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
1352 ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
1354 phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
1355 if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
1356 return;
1358 for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
1360 edge e1;
1361 edge_iterator ei;
1362 tree val = gimple_phi_arg_def (phi_stmt, i);
1363 edge e = gimple_phi_arg_edge (phi_stmt, i);
1365 if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
1366 continue;
1367 if ((check_value_one ^ integer_onep (val)) == 1)
1368 continue;
1369 if (EDGE_COUNT (e->src->succs) != 1)
1371 predict_paths_leading_to_edge (e, PRED_LOOP_EXIT, NOT_TAKEN);
1372 continue;
1375 FOR_EACH_EDGE (e1, ei, e->src->preds)
1376 predict_paths_leading_to_edge (e1, PRED_LOOP_EXIT, NOT_TAKEN);
1380 /* Predict edge probabilities by exploiting loop structure. */
1382 static void
1383 predict_loops (void)
1385 loop_iterator li;
1386 struct loop *loop;
1388 /* Try to predict out blocks in a loop that are not part of a
1389 natural loop. */
1390 FOR_EACH_LOOP (li, loop, 0)
1392 basic_block bb, *bbs;
1393 unsigned j, n_exits;
1394 VEC (edge, heap) *exits;
1395 struct tree_niter_desc niter_desc;
1396 edge ex;
1397 struct nb_iter_bound *nb_iter;
1398 enum tree_code loop_bound_code = ERROR_MARK;
1399 int loop_bound_step = 0;
1400 tree loop_bound_var = NULL;
1401 tree loop_iv_base = NULL;
1402 gimple stmt = NULL;
1404 exits = get_loop_exit_edges (loop);
1405 n_exits = VEC_length (edge, exits);
1406 if (!n_exits)
1408 VEC_free (edge, heap, exits);
1409 continue;
1412 FOR_EACH_VEC_ELT (edge, exits, j, ex)
1414 tree niter = NULL;
1415 HOST_WIDE_INT nitercst;
1416 int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
1417 int probability;
1418 enum br_predictor predictor;
1420 predict_extra_loop_exits (ex);
1422 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
1423 niter = niter_desc.niter;
1424 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
1425 niter = loop_niter_by_eval (loop, ex);
1427 if (TREE_CODE (niter) == INTEGER_CST)
1429 if (host_integerp (niter, 1)
1430 && compare_tree_int (niter, max-1) == -1)
1431 nitercst = tree_low_cst (niter, 1) + 1;
1432 else
1433 nitercst = max;
1434 predictor = PRED_LOOP_ITERATIONS;
1436 /* If we have just one exit and we can derive some information about
1437 the number of iterations of the loop from the statements inside
1438 the loop, use it to predict this exit. */
1439 else if (n_exits == 1)
1441 nitercst = estimated_stmt_executions_int (loop);
1442 if (nitercst < 0)
1443 continue;
1444 if (nitercst > max)
1445 nitercst = max;
1447 predictor = PRED_LOOP_ITERATIONS_GUESSED;
1449 else
1450 continue;
1452 probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
1453 predict_edge (ex, predictor, probability);
1455 VEC_free (edge, heap, exits);
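/* E.g. when nitercst works out to 10, the exit edge gets probability
   (REG_BR_PROB_BASE + 5) / 10, i.e. about 10%, leaving roughly 90%
   for the backedge.  */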
1457 /* Find information about loop bound variables. */
1458 for (nb_iter = loop->bounds; nb_iter;
1459 nb_iter = nb_iter->next)
1460 if (nb_iter->stmt
1461 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
1463 stmt = nb_iter->stmt;
1464 break;
1466 if (!stmt && last_stmt (loop->header)
1467 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
1468 stmt = last_stmt (loop->header);
1469 if (stmt)
1470 is_comparison_with_loop_invariant_p (stmt, loop,
1471 &loop_bound_var,
1472 &loop_bound_code,
1473 &loop_bound_step,
1474 &loop_iv_base);
1476 bbs = get_loop_body (loop);
1478 for (j = 0; j < loop->num_nodes; j++)
1480 int header_found = 0;
1481 edge e;
1482 edge_iterator ei;
1484 bb = bbs[j];
1486 /* Bypass loop heuristics on continue statements. These
1487 statements construct loops via "non-loop" constructs
1488 in the source language and are better handled
1489 separately. */
1490 if (predicted_by_p (bb, PRED_CONTINUE))
1491 continue;
1493 /* Loop branch heuristics - predict an edge back to a
1494 loop's head as taken. */
1495 if (bb == loop->latch)
1497 e = find_edge (loop->latch, loop->header);
1498 if (e)
1500 header_found = 1;
1501 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
1505 /* Loop exit heuristics - predict as not taken an edge exiting the loop
1506 if the conditional has no loop header successor. */
1507 if (!header_found
1508 /* If we already used more reliable loop exit predictors, do not
1509 bother with PRED_LOOP_EXIT. */
1510 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1511 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
1513 /* For a loop with many exits we don't want to predict all exits
1514 with a pretty large probability, because if all exits are
1515 considered in a row, the loop would be predicted to iterate
1516 almost never. The code dividing the probability by the number of
1517 exits is very rough. It should compute the number of exits
1518 taken on each path through the function (not the overall number
1519 of exits, which might be a lot higher for loops with wide switch
1520 statements in them) and take the n-th root.
1522 We limit the minimal probability to 2% to keep
1523 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
1524 as this was causing a regression in the perl benchmark, which
1525 contains such a wide loop. */
1527 int probability = ((REG_BR_PROB_BASE
1528 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
1529 / n_exits);
1530 if (probability < HITRATE (2))
1531 probability = HITRATE (2);
1532 FOR_EACH_EDGE (e, ei, bb->succs)
1533 if (e->dest->index < NUM_FIXED_BLOCKS
1534 || !flow_bb_inside_loop_p (loop, e->dest))
1535 predict_edge (e, PRED_LOOP_EXIT, probability);
1537 if (loop_bound_var)
1538 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
1539 loop_bound_code,
1540 loop_bound_step);
1543 /* Free basic blocks from get_loop_body. */
1544 free (bbs);
1548 /* Attempt to predict probabilities of BB outgoing edges using local
1549 properties. */
1550 static void
1551 bb_estimate_probability_locally (basic_block bb)
1553 rtx last_insn = BB_END (bb);
1554 rtx cond;
1556 if (! can_predict_insn_p (last_insn))
1557 return;
1558 cond = get_condition (last_insn, NULL, false, false);
1559 if (! cond)
1560 return;
1562 /* Try "pointer heuristic."
1563 A comparison ptr == 0 is predicted as false.
1564 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1565 if (COMPARISON_P (cond)
1566 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
1567 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
1569 if (GET_CODE (cond) == EQ)
1570 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
1571 else if (GET_CODE (cond) == NE)
1572 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
1574 else
1576 /* Try "opcode heuristic."
1577 EQ tests are usually false and NE tests are usually true. Also,
1578 most quantities are positive, so we can make the appropriate guesses
1579 about signed comparisons against zero. */
1580 switch (GET_CODE (cond))
1582 case CONST_INT:
1583 /* Unconditional branch. */
1584 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
1585 cond == const0_rtx ? NOT_TAKEN : TAKEN);
1586 break;
1588 case EQ:
1589 case UNEQ:
1590 /* Floating point comparisons appear to behave in a very
1591 unpredictable way because of the special role of equality
1592 tests in FP code. */
1593 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1595 /* Comparisons with 0 are often used for booleans and there is
1596 nothing useful to predict about them. */
1597 else if (XEXP (cond, 1) == const0_rtx
1598 || XEXP (cond, 0) == const0_rtx)
1600 else
1601 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
1602 break;
1604 case NE:
1605 case LTGT:
1606 /* Floating point comparisons appear to behave in a very
1607 unpredictable way because of the special role of equality
1608 tests in FP code. */
1609 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1611 /* Comparisons with 0 are often used for booleans and there is
1612 nothing useful to predict about them. */
1613 else if (XEXP (cond, 1) == const0_rtx
1614 || XEXP (cond, 0) == const0_rtx)
1616 else
1617 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
1618 break;
1620 case ORDERED:
1621 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
1622 break;
1624 case UNORDERED:
1625 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
1626 break;
1628 case LE:
1629 case LT:
1630 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1631 || XEXP (cond, 1) == constm1_rtx)
1632 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
1633 break;
1635 case GE:
1636 case GT:
1637 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1638 || XEXP (cond, 1) == constm1_rtx)
1639 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
1640 break;
1642 default:
1643 break;
1647 /* Set edge->probability for each successor edge of BB. */
1648 void
1649 guess_outgoing_edge_probabilities (basic_block bb)
1651 bb_estimate_probability_locally (bb);
1652 combine_predictions_for_insn (BB_END (bb), bb);
1655 static tree expr_expected_value (tree, bitmap);
1657 /* Helper function for expr_expected_value. */
1659 static tree
1660 expr_expected_value_1 (tree type, tree op0, enum tree_code code,
1661 tree op1, bitmap visited)
1663 gimple def;
1665 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
1667 if (TREE_CONSTANT (op0))
1668 return op0;
1670 if (code != SSA_NAME)
1671 return NULL_TREE;
1673 def = SSA_NAME_DEF_STMT (op0);
1675 /* If we were already here, break the infinite cycle. */
1676 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
1677 return NULL;
1679 if (gimple_code (def) == GIMPLE_PHI)
1681 /* All the arguments of the PHI node must have the same expected
1682 constant value. */
1683 int i, n = gimple_phi_num_args (def);
1684 tree val = NULL, new_val;
1686 for (i = 0; i < n; i++)
1688 tree arg = PHI_ARG_DEF (def, i);
1690 /* If this PHI has itself as an argument, we cannot
1691 determine the expected value of this argument. However,
1692 if we can find an expected constant value for the other
1693 PHI args then we can still be sure that this is
1694 likely a constant. So be optimistic and just
1695 continue with the next argument. */
1696 if (arg == PHI_RESULT (def))
1697 continue;
1699 new_val = expr_expected_value (arg, visited);
1700 if (!new_val)
1701 return NULL;
1702 if (!val)
1703 val = new_val;
1704 else if (!operand_equal_p (val, new_val, false))
1705 return NULL;
1707 return val;
1709 if (is_gimple_assign (def))
1711 if (gimple_assign_lhs (def) != op0)
1712 return NULL;
1714 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
1715 gimple_assign_rhs1 (def),
1716 gimple_assign_rhs_code (def),
1717 gimple_assign_rhs2 (def),
1718 visited);
1721 if (is_gimple_call (def))
1723 tree decl = gimple_call_fndecl (def);
1724 if (!decl)
1725 return NULL;
1726 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1727 switch (DECL_FUNCTION_CODE (decl))
1729 case BUILT_IN_EXPECT:
1731 tree val;
1732 if (gimple_call_num_args (def) != 2)
1733 return NULL;
1734 val = gimple_call_arg (def, 0);
1735 if (TREE_CONSTANT (val))
1736 return val;
1737 return gimple_call_arg (def, 1);
1740 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
1741 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1742 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1743 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1744 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1745 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1746 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
1747 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
1748 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1749 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1750 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1751 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1752 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1753 /* Assume that any given atomic operation has low contention,
1754 and thus the compare-and-swap operation succeeds. */
1755 return boolean_true_node;
1759 return NULL;
1762 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
1764 tree res;
1765 op0 = expr_expected_value (op0, visited);
1766 if (!op0)
1767 return NULL;
1768 op1 = expr_expected_value (op1, visited);
1769 if (!op1)
1770 return NULL;
1771 res = fold_build2 (code, type, op0, op1);
1772 if (TREE_CONSTANT (res))
1773 return res;
1774 return NULL;
1776 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
1778 tree res;
1779 op0 = expr_expected_value (op0, visited);
1780 if (!op0)
1781 return NULL;
1782 res = fold_build1 (code, type, op0);
1783 if (TREE_CONSTANT (res))
1784 return res;
1785 return NULL;
1787 return NULL;
1790 /* Return the constant EXPR is likely to have at execution time, or NULL
1791 if unknown. The function is used by the builtin_expect branch predictor,
1792 so the evidence must come from this construct plus possible constant folding.
1794 We may want to implement a more involved value guess (such as prediction
1795 based on value range propagation), but such tricks should go into a new
1796 implementation. */
1798 static tree
1799 expr_expected_value (tree expr, bitmap visited)
1801 enum tree_code code;
1802 tree op0, op1;
1804 if (TREE_CONSTANT (expr))
1805 return expr;
1807 extract_ops_from_tree (expr, &code, &op0, &op1);
1808 return expr_expected_value_1 (TREE_TYPE (expr),
1809 op0, code, op1, visited);
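/* For reference, the source-level construct these routines recognize is
   __builtin_expect; a hypothetical use (fast_path is a placeholder):

     if (__builtin_expect (ptr != NULL, 1))
       fast_path (ptr);

   The second argument is the value the first is expected to have, so the
   then-branch here ends up predicted taken via PRED_BUILTIN_EXPECT.  */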
1813 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
1814 we no longer need. */
1815 static unsigned int
1816 strip_predict_hints (void)
1818 basic_block bb;
1819 gimple ass_stmt;
1820 tree var;
1822 FOR_EACH_BB (bb)
1824 gimple_stmt_iterator bi;
1825 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
1827 gimple stmt = gsi_stmt (bi);
1829 if (gimple_code (stmt) == GIMPLE_PREDICT)
1831 gsi_remove (&bi, true);
1832 continue;
1834 else if (gimple_code (stmt) == GIMPLE_CALL)
1836 tree fndecl = gimple_call_fndecl (stmt);
1838 if (fndecl
1839 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1840 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
1841 && gimple_call_num_args (stmt) == 2)
1843 var = gimple_call_lhs (stmt);
1844 if (var)
1846 ass_stmt
1847 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
1848 gsi_replace (&bi, ass_stmt, true);
1850 else
1852 gsi_remove (&bi, true);
1853 continue;
1857 gsi_next (&bi);
1860 return 0;
1863 /* Predict using the opcode of the last statement in the basic block. */
1864 static void
1865 tree_predict_by_opcode (basic_block bb)
1867 gimple stmt = last_stmt (bb);
1868 edge then_edge;
1869 tree op0, op1;
1870 tree type;
1871 tree val;
1872 enum tree_code cmp;
1873 bitmap visited;
1874 edge_iterator ei;
1876 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1877 return;
1878 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1879 if (then_edge->flags & EDGE_TRUE_VALUE)
1880 break;
1881 op0 = gimple_cond_lhs (stmt);
1882 op1 = gimple_cond_rhs (stmt);
1883 cmp = gimple_cond_code (stmt);
1884 type = TREE_TYPE (op0);
1885 visited = BITMAP_ALLOC (NULL);
1886 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
1887 BITMAP_FREE (visited);
1888 if (val)
1890 if (integer_zerop (val))
1891 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
1892 else
1893 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
1894 return;
1896 /* Try "pointer heuristic."
1897 A comparison ptr == 0 is predicted as false.
1898 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1899 if (POINTER_TYPE_P (type))
1901 if (cmp == EQ_EXPR)
1902 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
1903 else if (cmp == NE_EXPR)
1904 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1906 else
1908 /* Try "opcode heuristic."
1909 EQ tests are usually false and NE tests are usually true. Also,
1910 most quantities are positive, so we can make the appropriate guesses
1911 about signed comparisons against zero. */
1912 switch (cmp)
1914 case EQ_EXPR:
1915 case UNEQ_EXPR:
1916 /* Floating point comparisons appear to behave in a very
1917 unpredictable way because of the special role of equality
1918 tests in FP code. */
1919 if (FLOAT_TYPE_P (type))
1921 /* Comparisons with 0 are often used for booleans and there is
1922 nothing useful to predict about them. */
1923 else if (integer_zerop (op0) || integer_zerop (op1))
1925 else
1926 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
1927 break;
1929 case NE_EXPR:
1930 case LTGT_EXPR:
1931 /* Floating point comparisons appear to behave in a very
1932 unpredictable way because of the special role of equality
1933 tests in FP code. */
1934 if (FLOAT_TYPE_P (type))
1936 /* Comparisons with 0 are often used for booleans and there is
1937 nothing useful to predict about them. */
1938 else if (integer_zerop (op0)
1939 || integer_zerop (op1))
1941 else
1942 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
1943 break;
1945 case ORDERED_EXPR:
1946 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
1947 break;
1949 case UNORDERED_EXPR:
1950 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
1951 break;
1953 case LE_EXPR:
1954 case LT_EXPR:
1955 if (integer_zerop (op1)
1956 || integer_onep (op1)
1957 || integer_all_onesp (op1)
1958 || real_zerop (op1)
1959 || real_onep (op1)
1960 || real_minus_onep (op1))
1961 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
1962 break;
1964 case GE_EXPR:
1965 case GT_EXPR:
1966 if (integer_zerop (op1)
1967 || integer_onep (op1)
1968 || integer_all_onesp (op1)
1969 || real_zerop (op1)
1970 || real_onep (op1)
1971 || real_minus_onep (op1))
1972 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
1973 break;
1975 default:
1976 break;
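/* Illustrative source forms for the heuristics above (placeholders, not
   GCC code): "if (p == NULL)" is predicted not taken by the pointer
   heuristic, "if (a != b)" taken by the opcode heuristic, and
   "if (len > 0)" taken because most quantities are positive.  */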
1980 /* Try to guess whether the return value represents an error code. */
1982 static enum br_predictor
1983 return_prediction (tree val, enum prediction *prediction)
1985 /* VOID. */
1986 if (!val)
1987 return PRED_NO_PREDICTION;
1988 /* Different heuristics for pointers and scalars. */
1989 if (POINTER_TYPE_P (TREE_TYPE (val)))
1991 /* NULL is usually not returned. */
1992 if (integer_zerop (val))
1994 *prediction = NOT_TAKEN;
1995 return PRED_NULL_RETURN;
1998 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2000 /* Negative return values are often used to indicate
2001 errors. */
2002 if (TREE_CODE (val) == INTEGER_CST
2003 && tree_int_cst_sgn (val) < 0)
2005 *prediction = NOT_TAKEN;
2006 return PRED_NEGATIVE_RETURN;
2008 /* Constant return values seem to be commonly taken.
2009 Zero/one often represent booleans, so exclude them from the
2010 heuristics. */
2011 if (TREE_CONSTANT (val)
2012 && (!integer_zerop (val) && !integer_onep (val)))
2014 *prediction = TAKEN;
2015 return PRED_CONST_RETURN;
2018 return PRED_NO_PREDICTION;
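/* Typical shapes this heuristic matches, on a hypothetical function:

     if (fail)
       return -1;        <- PRED_NEGATIVE_RETURN, path predicted unlikely
     if (obj == NULL)
       return NULL;      <- PRED_NULL_RETURN, path predicted unlikely
     return 42;          <- PRED_CONST_RETURN, path predicted likely  */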
2021 /* Find the basic block containing the return expression and look up its
2022 possible return value, trying to apply the RETURN_PREDICTION heuristics. */
2023 static void
2024 apply_return_prediction (void)
2026 gimple return_stmt = NULL;
2027 tree return_val;
2028 edge e;
2029 gimple phi;
2030 int phi_num_args, i;
2031 enum br_predictor pred;
2032 enum prediction direction;
2033 edge_iterator ei;
2035 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2037 return_stmt = last_stmt (e->src);
2038 if (return_stmt
2039 && gimple_code (return_stmt) == GIMPLE_RETURN)
2040 break;
2042 if (!e)
2043 return;
2044 return_val = gimple_return_retval (return_stmt);
2045 if (!return_val)
2046 return;
2047 if (TREE_CODE (return_val) != SSA_NAME
2048 || !SSA_NAME_DEF_STMT (return_val)
2049 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
2050 return;
2051 phi = SSA_NAME_DEF_STMT (return_val);
2052 phi_num_args = gimple_phi_num_args (phi);
2053 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2055 /* Avoid the degenerate case where all return values from the function
2056 belong to the same category (i.e. they are all positive constants),
2057 in which case we can hardly say anything useful about them. */
2058 for (i = 1; i < phi_num_args; i++)
2059 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2060 break;
2061 if (i != phi_num_args)
2062 for (i = 0; i < phi_num_args; i++)
2064 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2065 if (pred != PRED_NO_PREDICTION)
2066 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2067 direction);
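/* A sketch of the PHI case handled above (hypothetical user code):
   once returns are merged, a function such as

     int f (int x)
     {
       if (x < 0)
         return -1;
       if (x == 0)
         return 100;
       return x;
     }

   returns a single PHI <-1, 100, x>.  The arguments fall into
   different categories (negative constant, positive constant,
   unknown), so each incoming edge gets its own prediction. */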
2071 /* Look for basic blocks that contain unlikely-to-happen events
2072 (such as noreturn calls) and mark all paths leading to execution
2073 of such blocks as unlikely. */
2075 static void
2076 tree_bb_level_predictions (void)
2078 basic_block bb;
2079 bool has_return_edges = false;
2080 edge e;
2081 edge_iterator ei;
2083 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2084 if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
2086 has_return_edges = true;
2087 break;
2090 apply_return_prediction ();
2092 FOR_EACH_BB (bb)
2094 gimple_stmt_iterator gsi;
2096 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2098 gimple stmt = gsi_stmt (gsi);
2099 tree decl;
2101 if (is_gimple_call (stmt))
2103 if ((gimple_call_flags (stmt) & ECF_NORETURN)
2104 && has_return_edges)
2105 predict_paths_leading_to (bb, PRED_NORETURN,
2106 NOT_TAKEN);
2107 decl = gimple_call_fndecl (stmt);
2108 if (decl
2109 && lookup_attribute ("cold",
2110 DECL_ATTRIBUTES (decl)))
2111 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2112 NOT_TAKEN);
2114 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2116 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2117 gimple_predict_outcome (stmt));
2118 /* Keep GIMPLE_PREDICT around so early inlining will propagate
2119 hints to callers. */
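/* For example (hypothetical user code), in

     if (fd < 0)
       {
         perror ("open");
         abort ();            -- ECF_NORETURN call
       }

   all paths leading to the block with the abort call are marked
   unlikely by PRED_NORETURN, provided the function also has ordinary
   return edges; a call to a function declared with attribute "cold"
   is handled the same way via PRED_COLD_FUNCTION. */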
2125 #ifdef ENABLE_CHECKING
2127 /* Callback for pointer_map_traverse, asserts that the pointer map is
2128 empty. */
2130 static bool
2131 assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
2132 void *data ATTRIBUTE_UNUSED)
2134 gcc_assert (!*value);
2135 return false;
2137 #endif
2139 /* Predict branch probabilities and estimate profile for basic block BB. */
2141 static void
2142 tree_estimate_probability_bb (basic_block bb)
2144 edge e;
2145 edge_iterator ei;
2146 gimple last;
2148 FOR_EACH_EDGE (e, ei, bb->succs)
2150 /* Predict edges to user labels with attributes. */
2151 if (e->dest != EXIT_BLOCK_PTR)
2153 gimple_stmt_iterator gi;
2154 for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
2156 gimple stmt = gsi_stmt (gi);
2157 tree decl;
2159 if (gimple_code (stmt) != GIMPLE_LABEL)
2160 break;
2161 decl = gimple_label_label (stmt);
2162 if (DECL_ARTIFICIAL (decl))
2163 continue;
2165 /* Finally, we have a user-defined label. */
2166 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
2167 predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
2168 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
2169 predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
2173 /* Predict early returns to be probable, as we've already taken
2174 care of error returns, and the other cases are often used for
2175 fast paths through the function.
2177 Since we've already removed the return statements, we are
2178 looking for CFG like:
2180 if (conditional)
2183 goto return_block
2185 some other blocks
2186 return_block:
2187 return_stmt. */
2188 if (e->dest != bb->next_bb
2189 && e->dest != EXIT_BLOCK_PTR
2190 && single_succ_p (e->dest)
2191 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
2192 && (last = last_stmt (e->dest)) != NULL
2193 && gimple_code (last) == GIMPLE_RETURN)
2195 edge e1;
2196 edge_iterator ei1;
2198 if (single_succ_p (bb))
2200 FOR_EACH_EDGE (e1, ei1, bb->preds)
2201 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
2202 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
2203 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
2204 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2206 else
2207 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
2208 && !predicted_by_p (e->src, PRED_CONST_RETURN)
2209 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
2210 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2213 /* Look for a block we are guarding (i.e. we dominate it,
2214 but it doesn't postdominate us). */
2215 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
2216 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
2217 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
2219 gimple_stmt_iterator bi;
2221 /* The call heuristic claims that a guarded function call
2222 is improbable. This is because such calls are often used
2223 to signal exceptional situations such as printing error
2224 messages. */
2225 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
2226 gsi_next (&bi))
2228 gimple stmt = gsi_stmt (bi);
2229 if (is_gimple_call (stmt)
2230 /* Constant and pure calls are hardly ever used to signal
2231 something exceptional. */
2232 && gimple_has_side_effects (stmt))
2234 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
2235 break;
2240 tree_predict_by_opcode (bb);
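/* A sketch of the label-attribute heuristic handled above
   (hypothetical user code; GCC accepts "hot"/"cold" on labels):

     if (err)
       goto bail;
     ...
   bail: __attribute__ ((cold));

   Edges into such a label are predicted not taken via PRED_COLD_LABEL;
   symmetrically, a "hot" label yields PRED_HOT_LABEL, taken. */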
2243 /* Predict branch probabilities and estimate profile of the tree CFG.
2244 This function can be called from the loop optimizers to recompute
2245 the profile information. */
2247 void
2248 tree_estimate_probability (void)
2250 basic_block bb;
2252 add_noreturn_fake_exit_edges ();
2253 connect_infinite_loops_to_exit ();
2254 /* We use loop_niter_by_eval, which requires that the loops have
2255 preheaders. */
2256 create_preheaders (CP_SIMPLE_PREHEADERS);
2257 calculate_dominance_info (CDI_POST_DOMINATORS);
2259 bb_predictions = pointer_map_create ();
2260 tree_bb_level_predictions ();
2261 record_loop_exits ();
2263 if (number_of_loops () > 1)
2264 predict_loops ();
2266 FOR_EACH_BB (bb)
2267 tree_estimate_probability_bb (bb);
2269 FOR_EACH_BB (bb)
2270 combine_predictions_for_bb (bb);
2272 #ifdef ENABLE_CHECKING
2273 pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
2274 #endif
2275 pointer_map_destroy (bb_predictions);
2276 bb_predictions = NULL;
2278 estimate_bb_frequencies ();
2279 free_dominance_info (CDI_POST_DOMINATORS);
2280 remove_fake_exit_edges ();
2283 /* Predict branch probabilities and estimate profile of the tree CFG.
2284 This is the driver function for PASS_PROFILE. */
2286 static unsigned int
2287 tree_estimate_probability_driver (void)
2289 unsigned nb_loops;
2291 loop_optimizer_init (LOOPS_NORMAL);
2292 if (dump_file && (dump_flags & TDF_DETAILS))
2293 flow_loops_dump (dump_file, NULL, 0);
2295 mark_irreducible_loops ();
2297 nb_loops = number_of_loops ();
2298 if (nb_loops > 1)
2299 scev_initialize ();
2301 tree_estimate_probability ();
2303 if (nb_loops > 1)
2304 scev_finalize ();
2306 loop_optimizer_finalize ();
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2308 gimple_dump_cfg (dump_file, dump_flags);
2309 if (profile_status == PROFILE_ABSENT)
2310 profile_status = PROFILE_GUESSED;
2311 return 0;
2314 /* Predict, using predictor PRED, the edges to successors of CUR whose
2315 sources are not postdominated by BB; recurse to all postdominators. */
2317 static void
2318 predict_paths_for_bb (basic_block cur, basic_block bb,
2319 enum br_predictor pred,
2320 enum prediction taken,
2321 bitmap visited)
2323 edge e;
2324 edge_iterator ei;
2325 basic_block son;
2327 /* We are looking for all edges forming the edge cut induced by
2328 the set of all blocks postdominated by BB. */
2329 FOR_EACH_EDGE (e, ei, cur->preds)
2330 if (e->src->index >= NUM_FIXED_BLOCKS
2331 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
2333 edge e2;
2334 edge_iterator ei2;
2335 bool found = false;
2337 /* Ignore fake and EH edges; we predict them as not taken anyway. */
2338 if (e->flags & (EDGE_EH | EDGE_FAKE))
2339 continue;
2340 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
2342 /* See if there is an edge from e->src that is not abnormal
2343 and does not lead to BB. */
2344 FOR_EACH_EDGE (e2, ei2, e->src->succs)
2345 if (e2 != e
2346 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
2347 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
2349 found = true;
2350 break;
2353 /* If there is a non-abnormal path leaving e->src, predict the edge
2354 using the predictor. Otherwise we need to look for paths
2355 leading to e->src.
2357 The second case may lead to an infinite loop if we are predicting
2358 regions that are only reachable by abnormal edges. We simply
2359 prevent visiting a given BB twice. */
2360 if (found)
2361 predict_edge_def (e, pred, taken);
2362 else if (bitmap_set_bit (visited, e->src->index))
2363 predict_paths_for_bb (e->src, e->src, pred, taken, visited);
2365 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
2366 son;
2367 son = next_dom_son (CDI_POST_DOMINATORS, son))
2368 predict_paths_for_bb (son, bb, pred, taken, visited);
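/* For illustration: suppose BB's only predecessor is C, C is reached
   from A and B, and both A and B have another successor that avoids
   BB.  Then C (and BB itself) are postdominated by BB while A and B
   are not, so the edge cut consists of A->C and B->C, and exactly
   those two edges receive the prediction. */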
2371 /* Set branch probabilities on all paths leading to BB according to
2372 the predictor PRED and prediction TAKEN. */
2374 static void
2375 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
2376 enum prediction taken)
2378 bitmap visited = BITMAP_ALLOC (NULL);
2379 predict_paths_for_bb (bb, bb, pred, taken, visited);
2380 BITMAP_FREE (visited);
2383 /* Like predict_paths_leading_to but take edge instead of basic block. */
2385 static void
2386 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
2387 enum prediction taken)
2389 bool has_nonloop_edge = false;
2390 edge_iterator ei;
2391 edge e2;
2393 basic_block bb = e->src;
2394 FOR_EACH_EDGE (e2, ei, bb->succs)
2395 if (e2->dest != e->src && e2->dest != e->dest
2396 && !(e->flags & (EDGE_EH | EDGE_FAKE))
2397 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
2399 has_nonloop_edge = true;
2400 break;
2402 if (!has_nonloop_edge)
2404 bitmap visited = BITMAP_ALLOC (NULL);
2405 predict_paths_for_bb (bb, bb, pred, taken, visited);
2406 BITMAP_FREE (visited);
2408 else
2409 predict_edge_def (e, pred, taken);
2412 /* This is used to carry information about basic blocks. It is
2413 attached to the AUX field of the standard CFG block. */
2415 typedef struct block_info_def
2417 /* Estimated frequency of execution of basic_block. */
2418 sreal frequency;
2420 /* To keep queue of basic blocks to process. */
2421 basic_block next;
2423 /* Number of predecessors we need to visit first. */
2424 int npredecessors;
2425 } *block_info;
2427 /* Similar information for edges. */
2428 typedef struct edge_info_def
2430 /* If the edge is a loopback edge, the probability that the edge will
2431 be reached given that the header is. The estimated number of
2432 iterations of the loop can then be computed as 1 / (1 - back_edge_prob). */
2433 sreal back_edge_prob;
2434 /* True if the edge is a loopback edge in the natural loop. */
2435 unsigned int back_edge:1;
2436 } *edge_info;
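/* Worked example: a latch edge with back_edge_prob of 0.9 corresponds
   to an expected 1 / (1 - 0.9) = 10 iterations; 0.99 would correspond
   to roughly 100 iterations. */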
2438 #define BLOCK_INFO(B) ((block_info) (B)->aux)
2439 #define EDGE_INFO(E) ((edge_info) (E)->aux)
2441 /* Helper function for estimate_bb_frequencies.
2442 Propagate the frequencies in blocks marked in
2443 TOVISIT, starting in HEAD. */
2445 static void
2446 propagate_freq (basic_block head, bitmap tovisit)
2448 basic_block bb;
2449 basic_block last;
2450 unsigned i;
2451 edge e;
2452 basic_block nextbb;
2453 bitmap_iterator bi;
2455 /* For each basic block we need to visit, count the number of its
2456 predecessors that must be visited first. */
2457 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
2459 edge_iterator ei;
2460 int count = 0;
2462 bb = BASIC_BLOCK (i);
2464 FOR_EACH_EDGE (e, ei, bb->preds)
2466 bool visit = bitmap_bit_p (tovisit, e->src->index);
2468 if (visit && !(e->flags & EDGE_DFS_BACK))
2469 count++;
2470 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
2471 fprintf (dump_file,
2472 "Irreducible region hit, ignoring edge to %i->%i\n",
2473 e->src->index, bb->index);
2475 BLOCK_INFO (bb)->npredecessors = count;
2476 /* When the function never returns, we will never process the exit block. */
2477 if (!count && bb == EXIT_BLOCK_PTR)
2478 bb->count = bb->frequency = 0;
2481 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
2482 last = head;
2483 for (bb = head; bb; bb = nextbb)
2485 edge_iterator ei;
2486 sreal cyclic_probability, frequency;
2488 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
2489 memcpy (&frequency, &real_zero, sizeof (real_zero));
2491 nextbb = BLOCK_INFO (bb)->next;
2492 BLOCK_INFO (bb)->next = NULL;
2494 /* Compute frequency of basic block. */
2495 if (bb != head)
2497 #ifdef ENABLE_CHECKING
2498 FOR_EACH_EDGE (e, ei, bb->preds)
2499 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
2500 || (e->flags & EDGE_DFS_BACK));
2501 #endif
2503 FOR_EACH_EDGE (e, ei, bb->preds)
2504 if (EDGE_INFO (e)->back_edge)
2506 sreal_add (&cyclic_probability, &cyclic_probability,
2507 &EDGE_INFO (e)->back_edge_prob);
2509 else if (!(e->flags & EDGE_DFS_BACK))
2511 sreal tmp;
2513 /* frequency += (e->probability
2514 * BLOCK_INFO (e->src)->frequency /
2515 REG_BR_PROB_BASE); */
2517 sreal_init (&tmp, e->probability, 0);
2518 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
2519 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
2520 sreal_add (&frequency, &frequency, &tmp);
2523 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
2525 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
2526 sizeof (frequency));
2528 else
2530 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
2532 memcpy (&cyclic_probability, &real_almost_one,
2533 sizeof (real_almost_one));
2536 /* BLOCK_INFO (bb)->frequency = frequency
2537 / (1 - cyclic_probability) */
2539 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
2540 sreal_div (&BLOCK_INFO (bb)->frequency,
2541 &frequency, &cyclic_probability);
2545 bitmap_clear_bit (tovisit, bb->index);
2547 e = find_edge (bb, head);
2548 if (e)
2550 sreal tmp;
2552 /* EDGE_INFO (e)->back_edge_prob
2553 = ((e->probability * BLOCK_INFO (bb)->frequency)
2554 / REG_BR_PROB_BASE); */
2556 sreal_init (&tmp, e->probability, 0);
2557 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
2558 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2559 &tmp, &real_inv_br_prob_base);
2562 /* Propagate to successor blocks. */
2563 FOR_EACH_EDGE (e, ei, bb->succs)
2564 if (!(e->flags & EDGE_DFS_BACK)
2565 && BLOCK_INFO (e->dest)->npredecessors)
2567 BLOCK_INFO (e->dest)->npredecessors--;
2568 if (!BLOCK_INFO (e->dest)->npredecessors)
2570 if (!nextbb)
2571 nextbb = e->dest;
2572 else
2573 BLOCK_INFO (last)->next = e->dest;
2575 last = e->dest;
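/* Worked example of the propagation above: for an if/else diamond
   where HEAD (frequency 1.0) branches with probabilities 0.6 and 0.4,
   the two arms get frequencies 0.6 and 0.4 and the join block gets
   0.6 + 0.4 = 1.0 again.  If the join loops back to HEAD with
   probability p, HEAD's frequency becomes 1 / (1 - p) instead,
   scaling the whole loop body accordingly. */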
2581 /* Estimate probabilities of loopback edges in loops at the same nest level. */
2583 static void
2584 estimate_loops_at_level (struct loop *first_loop)
2586 struct loop *loop;
2588 for (loop = first_loop; loop; loop = loop->next)
2590 edge e;
2591 basic_block *bbs;
2592 unsigned i;
2593 bitmap tovisit = BITMAP_ALLOC (NULL);
2595 estimate_loops_at_level (loop->inner);
2597 /* Find current loop back edge and mark it. */
2598 e = loop_latch_edge (loop);
2599 EDGE_INFO (e)->back_edge = 1;
2601 bbs = get_loop_body (loop);
2602 for (i = 0; i < loop->num_nodes; i++)
2603 bitmap_set_bit (tovisit, bbs[i]->index);
2604 free (bbs);
2605 propagate_freq (loop->header, tovisit);
2606 BITMAP_FREE (tovisit);
2610 /* Propagate frequencies through the structure of loops. */
2612 static void
2613 estimate_loops (void)
2615 bitmap tovisit = BITMAP_ALLOC (NULL);
2616 basic_block bb;
2618 /* Start by estimating the frequencies in the loops. */
2619 if (number_of_loops () > 1)
2620 estimate_loops_at_level (current_loops->tree_root->inner);
2622 /* Now propagate the frequencies through all the blocks. */
2623 FOR_ALL_BB (bb)
2625 bitmap_set_bit (tovisit, bb->index);
2627 propagate_freq (ENTRY_BLOCK_PTR, tovisit);
2628 BITMAP_FREE (tovisit);
2631 /* Convert counts measured by profile-driven feedback to frequencies.
2632 Return nonzero iff there was any nonzero execution count. */
2634 int
2635 counts_to_freqs (void)
2637 gcov_type count_max, true_count_max = 0;
2638 basic_block bb;
2640 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2641 true_count_max = MAX (bb->count, true_count_max);
2643 count_max = MAX (true_count_max, 1);
2644 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2645 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
2647 return true_count_max;
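/* Worked example: with BB_FREQ_MAX of 10000 and a hottest block
   executing 400 times, a block with count 100 gets frequency
   (100 * 10000 + 200) / 400 == 2500, i.e. a quarter of the maximum. */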
2650 /* Return true if the function is likely to be expensive, so there is
2651 no point in optimizing the prologue or epilogue, or in inlining, at
2652 the expense of code size growth. THRESHOLD is the limit on the number
2653 of instructions the function can execute on average and still be considered inexpensive. */
2655 bool
2656 expensive_function_p (int threshold)
2658 unsigned int sum = 0;
2659 basic_block bb;
2660 unsigned int limit;
2662 /* We cannot compute this accurately for large thresholds due to scaled
2663 frequencies. */
2664 gcc_assert (threshold <= BB_FREQ_MAX);
2666 /* Frequencies are out of range. This either means that the function
2667 contains an internal loop executing more than BB_FREQ_MAX times, or that
2668 profile feedback is available and the function has not been executed at all. */
2669 if (ENTRY_BLOCK_PTR->frequency == 0)
2670 return true;
2672 /* This is at most BB_FREQ_MAX^2, so overflow won't happen. */
2673 limit = ENTRY_BLOCK_PTR->frequency * threshold;
2674 FOR_EACH_BB (bb)
2676 rtx insn;
2678 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
2679 insn = NEXT_INSN (insn))
2680 if (active_insn_p (insn))
2682 sum += bb->frequency;
2683 if (sum > limit)
2684 return true;
2688 return false;
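/* Worked example: with an entry block frequency of 1000 and THRESHOLD
   of 50, limit is 50000; a function whose active insns' frequencies
   sum to more than that -- say 100 active insns in a block of
   frequency 600, giving a sum of 60000 -- is reported as expensive. */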
2691 /* Estimate basic block frequencies from the given branch probabilities. */
2693 void
2694 estimate_bb_frequencies (void)
2696 basic_block bb;
2697 sreal freq_max;
2699 if (profile_status != PROFILE_READ || !counts_to_freqs ())
2701 static int real_values_initialized = 0;
2703 if (!real_values_initialized)
2705 real_values_initialized = 1;
2706 sreal_init (&real_zero, 0, 0);
2707 sreal_init (&real_one, 1, 0);
2708 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
2709 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
2710 sreal_init (&real_one_half, 1, -1);
2711 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
2712 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
2715 mark_dfs_back_edges ();
2717 single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
2719 /* Set up block info for each basic block. */
2720 alloc_aux_for_blocks (sizeof (struct block_info_def));
2721 alloc_aux_for_edges (sizeof (struct edge_info_def));
2722 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2724 edge e;
2725 edge_iterator ei;
2727 FOR_EACH_EDGE (e, ei, bb->succs)
2729 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
2730 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
2731 &EDGE_INFO (e)->back_edge_prob,
2732 &real_inv_br_prob_base);
2736 /* First compute probabilities locally for each loop from innermost
2737 to outermost, examining the probabilities of back edges. */
2738 estimate_loops ();
2740 memcpy (&freq_max, &real_zero, sizeof (real_zero));
2741 FOR_EACH_BB (bb)
2742 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
2743 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
2745 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
2746 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2748 sreal tmp;
2750 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
2751 sreal_add (&tmp, &tmp, &real_one_half);
2752 bb->frequency = sreal_to_int (&tmp);
2755 free_aux_for_blocks ();
2756 free_aux_for_edges ();
2758 compute_function_frequency ();
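/* Note on the final scaling above: frequencies are normalized so the
   hottest block gets BB_FREQ_MAX.  For instance, if the largest
   propagated frequency is 25.0, every block's frequency is multiplied
   by BB_FREQ_MAX / 25.0 and rounded, so a block at 5.0 ends up with
   bb->frequency == BB_FREQ_MAX / 5. */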
2761 /* Decide whether the function is hot, cold, or unlikely executed. */
2762 void
2763 compute_function_frequency (void)
2765 basic_block bb;
2766 struct cgraph_node *node = cgraph_get_node (current_function_decl);
2767 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2768 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
2769 node->only_called_at_startup = true;
2770 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
2771 node->only_called_at_exit = true;
2773 if (!profile_info || !flag_branch_probabilities)
2775 int flags = flags_from_decl_or_type (current_function_decl);
2776 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
2777 != NULL)
2778 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2779 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
2780 != NULL)
2781 node->frequency = NODE_FREQUENCY_HOT;
2782 else if (flags & ECF_NORETURN)
2783 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2784 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
2785 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2786 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2787 || DECL_STATIC_DESTRUCTOR (current_function_decl))
2788 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2789 return;
2791 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2792 FOR_EACH_BB (bb)
2794 if (maybe_hot_bb_p (cfun, bb))
2796 node->frequency = NODE_FREQUENCY_HOT;
2797 return;
2799 if (!probably_never_executed_bb_p (cfun, bb))
2800 node->frequency = NODE_FREQUENCY_NORMAL;
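/* For example (hypothetical user code), when no profile feedback is
   available a function declared

     __attribute__ ((cold)) void die (const char *msg);

   is classified NODE_FREQUENCY_UNLIKELY_EXECUTED regardless of its
   body, while an unannotated function falls back to the
   maybe_hot_bb_p / probably_never_executed_bb_p scan above. */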
2804 static bool
2805 gate_estimate_probability (void)
2807 return flag_guess_branch_prob;
2810 /* Build PREDICT_EXPR. */
2811 tree
2812 build_predict_expr (enum br_predictor predictor, enum prediction taken)
2814 tree t = build1 (PREDICT_EXPR, void_type_node,
2815 build_int_cst (integer_type_node, predictor));
2816 SET_PREDICT_EXPR_OUTCOME (t, taken);
2817 return t;
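/* A minimal usage sketch (the statement-list handling here is
   hypothetical, not taken from this file):

     tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, NOT_TAKEN);
     append_to_statement_list (hint, &stmts);

   This is the form in which branch hints are materialized before
   being lowered to GIMPLE_PREDICT statements. */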
2820 const char *
2821 predictor_name (enum br_predictor predictor)
2823 return predictor_info[predictor].name;
2826 struct gimple_opt_pass pass_profile =
2829 GIMPLE_PASS,
2830 "profile_estimate", /* name */
2831 OPTGROUP_NONE, /* optinfo_flags */
2832 gate_estimate_probability, /* gate */
2833 tree_estimate_probability_driver, /* execute */
2834 NULL, /* sub */
2835 NULL, /* next */
2836 0, /* static_pass_number */
2837 TV_BRANCH_PROB, /* tv_id */
2838 PROP_cfg, /* properties_required */
2839 0, /* properties_provided */
2840 0, /* properties_destroyed */
2841 0, /* todo_flags_start */
2842 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2846 struct gimple_opt_pass pass_strip_predict_hints =
2849 GIMPLE_PASS,
2850 "*strip_predict_hints", /* name */
2851 OPTGROUP_NONE, /* optinfo_flags */
2852 NULL, /* gate */
2853 strip_predict_hints, /* execute */
2854 NULL, /* sub */
2855 NULL, /* next */
2856 0, /* static_pass_number */
2857 TV_BRANCH_PROB, /* tv_id */
2858 PROP_cfg, /* properties_required */
2859 0, /* properties_provided */
2860 0, /* properties_destroyed */
2861 0, /* todo_flags_start */
2862 TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
2866 /* Rebuild function frequencies. Passes are in general expected to
2867 maintain the profile by hand; however, in some cases this is not
2868 possible: for example, when inlining several functions with loops,
2869 frequencies might run out of scale and thus need to be recomputed. */
2871 void
2872 rebuild_frequencies (void)
2874 timevar_push (TV_REBUILD_FREQUENCIES);
2875 if (profile_status == PROFILE_GUESSED)
2877 loop_optimizer_init (0);
2878 add_noreturn_fake_exit_edges ();
2879 mark_irreducible_loops ();
2880 connect_infinite_loops_to_exit ();
2881 estimate_bb_frequencies ();
2882 remove_fake_exit_edges ();
2883 loop_optimizer_finalize ();
2885 else if (profile_status == PROFILE_READ)
2886 counts_to_freqs ();
2887 else
2888 gcc_unreachable ();
2889 timevar_pop (TV_REBUILD_FREQUENCIES);