/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "rtl.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "cfghooks.h"
38 #include "tree-pass.h"
39 #include "ssa.h"
40 #include "memmodel.h"
41 #include "emit-rtl.h"
42 #include "cgraph.h"
43 #include "coverage.h"
44 #include "diagnostic-core.h"
45 #include "gimple-predict.h"
46 #include "fold-const.h"
47 #include "calls.h"
48 #include "cfganal.h"
49 #include "profile.h"
50 #include "sreal.h"
51 #include "cfgloop.h"
52 #include "gimple-iterator.h"
53 #include "tree-cfg.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-ssa-loop.h"
56 #include "tree-scalar-evolution.h"
57 #include "ipa-utils.h"
58 #include "gimple-pretty-print.h"
59 #include "selftest.h"
60 #include "cfgrtl.h"
61 #include "stringpool.h"
62 #include "attribs.h"
/* Enum with reasons why a predictor is ignored.  */

enum predictor_reason
{
  REASON_NONE,
  REASON_IGNORED,
  REASON_SINGLE_EDGE_DUPLICATE,
  REASON_EDGE_PAIR_DUPLICATE
};

/* String messages for the aforementioned enum.  */

static const char *reason_messages[] = {"", " (ignored)",
    " (single edge duplicate)", " (edge pair duplicate)"};
static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block,
			     enum predictor_reason, edge);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction,
				      class loop *in_loop = NULL);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction,
					   class loop *in_loop = NULL);
static bool can_predict_insn_p (const rtx_insn *);
static HOST_WIDE_INT get_predictor_value (br_predictor, HOST_WIDE_INT);
static void determine_unlikely_bbs ();
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
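
/* Editorial worked example (not part of the original sources): with the
   usual REG_BR_PROB_BASE of 10000, HITRATE (54) expands to
   (54 * 10000 + 50) / 100 == 5400, i.e. a 54% hitrate rescaled to the
   internal fixed-point probability representation; the "+ 50" rounds to
   nearest instead of truncating for bases where the product is not a
   multiple of 100.  */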
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  if (min_count == -1)
    {
      const int hot_frac = param_hot_bb_count_fraction;
      const gcov_type min_hot_count
	= hot_frac
	  ? profile_info->sum_max / hot_frac
	  : (gcov_type) profile_count::max_count;
      set_hot_bb_threshold (min_hot_count);
      if (dump_file)
	fprintf (dump_file, "Setting hotness threshold to %" PRId64 ".\n",
		 min_hot_count);
    }
  return min_count;
}
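
/* Editorial example (assuming the default of 10000 for
   --param hot-bb-count-fraction, current at the time of writing): a
   training run whose most executed counter reached 2000000 yields a
   hotness threshold of 2000000 / 10000 == 200; blocks with counts at or
   above 200 are then treated as hot by maybe_hot_count_p below.  */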
/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}
/* Return TRUE if COUNT is considered to be hot in function FUN.  */

bool
maybe_hot_count_p (struct function *fun, profile_count count)
{
  if (!count.initialized_p ())
    return true;
  if (count.ipa () == profile_count::zero ())
    return false;
  if (!count.ipa_p ())
    {
      struct cgraph_node *node = cgraph_node::get (fun->decl);
      if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
	{
	  if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	    return false;
	  if (node->frequency == NODE_FREQUENCY_HOT)
	    return true;
	}
      if (profile_status_for_fn (fun) == PROFILE_ABSENT)
	return true;
      if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
	  && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
	return false;
      if (count.apply_scale (param_hot_bb_frequency_fraction, 1)
	  < ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
	return false;
      return true;
    }
  /* Code executed at most once is not hot.  */
  if (count <= MAX (profile_info ? profile_info->runs : 1, 1))
    return false;
  return (count >= get_hot_bb_threshold ());
}
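
/* Editorial example for the static-profile path above (assuming the
   default of 1000 for --param hot-bb-frequency-fraction): the scaled
   comparison makes a block count as hot only when it executes at least
   once per 1000 entries into the function, i.e. when
   count * 1000 >= entry_count.  */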
/* Return true if basic block BB of function FUN can be CPU intensive
   and should thus be optimized for maximum performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  return maybe_hot_count_p (fun, bb->count);
}

/* Return true if edge E can be CPU intensive and should thus be optimized
   for maximum performance.  */

bool
maybe_hot_edge_p (edge e)
{
  return maybe_hot_count_p (cfun, e->count ());
}

/* Return true if COUNT is considered to be never executed in function FUN
   or if function FUN is considered so in the static profile.  */

static bool
probably_never_executed (struct function *fun, profile_count count)
{
  gcc_checking_assert (fun);
  if (count.ipa () == profile_count::zero ())
    return true;
  /* Do not trust adjusted counts.  Inlining can produce cold-section code
     with a low execution count, and such counts are not safe even with a
     read profile: acting on them may move code that actually gets executed
     into the cold section of the binary, which is not desirable.  */
  if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
    {
      const int unlikely_frac = param_unlikely_bb_count_fraction;
      if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
	return false;
      return true;
    }
  if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
/* Return true if basic block BB of function FUN is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count);
}

/* Return true if edge E is unlikely executed for obvious reasons.  */

static bool
unlikely_executed_edge_p (edge e)
{
  return (e->src->count == profile_count::zero ()
	  || e->probability == profile_probability::never ())
	 || (e->flags & (EDGE_EH | EDGE_FAKE));
}

/* Return true if edge E of function FUN is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  if (unlikely_executed_edge_p (e))
    return true;
  return probably_never_executed (fun, e->count ());
}

/* Return the level at which function FUN should be optimized for size.  */

optimize_size_level
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return optimize_size ? OPTIMIZE_SIZE_MAX : OPTIMIZE_SIZE_NO;
  cgraph_node *n = cgraph_node::get (fun->decl);
  if (n)
    return n->optimize_for_size_p ();
  return OPTIMIZE_SIZE_NO;
}
/* Return true if function FUN should be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return the optimization type that should be used for function FUN.  */

optimization_type
function_optimization_type (struct function *fun)
{
  return (optimize_function_for_speed_p (fun)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}

/* Return the level at which basic block BB should be optimized for size.  */

optimize_size_level
optimize_bb_for_size_p (const_basic_block bb)
{
  enum optimize_size_level ret = optimize_function_for_size_p (cfun);

  if (bb && ret < OPTIMIZE_SIZE_MAX && bb->count == profile_count::zero ())
    ret = OPTIMIZE_SIZE_MAX;
  if (bb && ret < OPTIMIZE_SIZE_BALANCED && !maybe_hot_bb_p (cfun, bb))
    ret = OPTIMIZE_SIZE_BALANCED;
  return ret;
}

/* Return TRUE if basic block BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return the optimization type that should be used for basic block BB.  */

optimization_type
bb_optimization_type (const_basic_block bb)
{
  return (optimize_bb_for_speed_p (bb)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}

/* Return the level at which edge E should be optimized for size.  */

optimize_size_level
optimize_edge_for_size_p (edge e)
{
  enum optimize_size_level ret = optimize_function_for_size_p (cfun);

  if (ret < OPTIMIZE_SIZE_MAX && unlikely_executed_edge_p (e))
    ret = OPTIMIZE_SIZE_MAX;
  if (ret < OPTIMIZE_SIZE_BALANCED && !maybe_hot_edge_p (e))
    ret = OPTIMIZE_SIZE_BALANCED;
  return ret;
}

/* Return TRUE if edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return the level at which the current function's insns should be
   optimized for size.  */

optimize_size_level
optimize_insn_for_size_p (void)
{
  enum optimize_size_level ret = optimize_function_for_size_p (cfun);
  if (ret < OPTIMIZE_SIZE_BALANCED && !crtl->maybe_hot_insn_p)
    ret = OPTIMIZE_SIZE_BALANCED;
  return ret;
}

/* Return TRUE if the current function is optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Return the level at which LOOP should be optimized for size.  */

optimize_size_level
optimize_loop_for_size_p (class loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE if LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (class loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE if nest rooted at LOOP should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (class loop *loop)
{
  class loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return the level at which the nest rooted at LOOP should be optimized
   for size.  */

optimize_size_level
optimize_loop_nest_for_size_p (class loop *loop)
{
  enum optimize_size_level ret = optimize_loop_for_size_p (loop);
  class loop *l = loop;

  l = loop->inner;
  while (l && l != loop)
    {
      if (ret == OPTIMIZE_SIZE_NO)
	break;
      ret = MIN (optimize_loop_for_size_p (l), ret);
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return ret;
}
/* Return true if edge E is likely to be well predictable by a branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (!e->probability.initialized_p ())
    return false;
  if ((e->probability.to_reg_br_prob_base ()
       <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base ()
	  <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
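
/* Editorial example (assuming the default of 2 for
   --param predictable-branch-outcome): an edge counts as well
   predictable when its probability is at most 2% or at least 98%,
   i.e. within two percentage points of either end of the scale.  */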
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Structure representing predictions on the tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true if edge E is already predicted by PREDICTOR with an
   outcome of TAKEN.  */

bool
edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken)
{
  struct edge_prediction *i;
  basic_block bb = e->src;
  edge_prediction **preds = bb_predictions->get (bb);
  if (!preds)
    return false;

  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor
	&& i->ep_edge == e
	&& i->ep_probability == probability)
      return true;
  return false;
}
/* Return true if the probability of edge E is reliable.  */

bool
edge_probability_reliable_p (const_edge e)
{
  return e->probability.probably_reliable_p ();
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */

bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return profile_probability::from_reg_br_prob_note
	   (XINT (note, 0)).probably_reliable_p ();
}

static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;
  gcc_assert (probability != PROB_UNINITIALIZED);

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx_insn *last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */

void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && EDGE_COUNT (e->src->succs) > 1
      && flag_guess_branch_prob
      && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Filter edge predictions PREDS by a function FILTER: if FILTER returns
   false for a prediction, that prediction is removed.
   DATA is passed to the filter function.  */

static void
filter_predictions (edge_prediction **preds,
		    bool (*filter) (edge_prediction *, void *), void *data)
{
  if (!bb_predictions)
    return;

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*filter) (*prediction, data))
	    prediction = &((*prediction)->ep_next);
	  else
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	}
    }
}

/* Filter function predicate that returns true for an edge prediction P
   whose edge is not equal to DATA.  */

static bool
not_equal_edge_p (edge_prediction *p, void *data)
{
  return p->ep_edge != (edge) data;
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);
  filter_predictions (preds, not_equal_edge_p, e);
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = profile_probability::from_reg_br_prob_note
			 (XINT (note, 0)).invert ().to_reg_br_prob_note ();
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, enum predictor_reason reason = REASON_NONE,
		 edge ep_edge = NULL)
{
  edge e = ep_edge;
  edge_iterator ei;

  if (!file)
    return;

  if (e == NULL)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (! (e->flags & EDGE_FALLTHRU))
	break;

  char edge_info_str[128];
  if (ep_edge)
    sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index,
	     ep_edge->dest->index);
  else
    edge_info_str[0] = '\0';

  fprintf (file, "  %s heuristics%s%s: %.2f%%",
	   predictor_info[predictor].name,
	   edge_info_str, reason_messages[reason],
	   probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count.initialized_p ())
    {
      fprintf (file, "  exec ");
      bb->count.dump (file);
      if (e)
	{
	  fprintf (file, " hit ");
	  e->count ().dump (file);
	  fprintf (file, " (%.1f%%)", e->count ().to_gcov_type () * 100.0
		   / bb->count.to_gcov_type ());
	}
    }

  fprintf (file, "\n");

  /* Print output that can be easily read by the analyze_brprob.py script.
     We are interested only in counts that are read from GCDA files.  */
  if (dump_file && (dump_flags & TDF_DETAILS)
      && bb->count.precise_p ()
      && reason == REASON_NONE)
    {
      fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n",
	       predictor_info[predictor].name,
	       bb->count.to_gcov_type (), e->count ().to_gcov_type (),
	       probability * 100.0 / REG_BR_PROB_BASE);
    }
}
/* Return true if STMT is known to be unlikely executed.  */

static bool
unlikely_executed_stmt_p (gimple *stmt)
{
  if (!is_gimple_call (stmt))
    return false;
  /* NORETURN attribute alone is not strong enough: exit() may be quite
     likely executed once during program run.  */
  if (gimple_call_fntype (stmt)
      && lookup_attribute ("cold",
			   TYPE_ATTRIBUTES (gimple_call_fntype (stmt)))
      && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
    return true;
  tree decl = gimple_call_fndecl (stmt);
  if (!decl)
    return false;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl))
      && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
    return true;

  cgraph_node *n = cgraph_node::get (decl);
  if (!n)
    return false;

  availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (!n->analyzed
      || n->decl == current_function_decl)
    return false;
  return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED;
}
/* Return true if BB is unlikely executed.  */

static bool
unlikely_executed_bb_p (basic_block bb)
{
  if (bb->count == profile_count::zero ())
    return true;
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      if (unlikely_executed_stmt_p (gsi_stmt (gsi)))
	return true;
      if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
	return false;
    }
  return false;
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  If UNLIKELY_EDGES is not null, distribute
   the probability evenly over all edges not mentioned in the set; edges
   in the set are given PROB_VERY_UNLIKELY probability.  Similarly for
   LIKELY_EDGES: if we have exactly one likely edge, make the other edges
   predicted as not probable.  */

static void
set_even_probabilities (basic_block bb,
			hash_set<edge> *unlikely_edges = NULL,
			hash_set<edge_prediction *> *likely_edges = NULL)
{
  unsigned nedges = 0, unlikely_count = 0;
  edge e = NULL;
  edge_iterator ei;
  profile_probability all = profile_probability::always ();

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->probability.initialized_p ())
      all -= e->probability;
    else if (!unlikely_executed_edge_p (e))
      {
	nedges++;
	if (unlikely_edges != NULL && unlikely_edges->contains (e))
	  {
	    all -= profile_probability::very_unlikely ();
	    unlikely_count++;
	  }
      }

  /* Make the distribution even if all edges are unlikely.  */
  unsigned likely_count = likely_edges ? likely_edges->elements () : 0;
  if (unlikely_count == nedges)
    {
      unlikely_edges = NULL;
      unlikely_count = 0;
    }

  /* If we have one likely edge, then use its probability and distribute
     remaining probabilities as even.  */
  if (likely_count == 1)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  ;
	else if (!unlikely_executed_edge_p (e))
	  {
	    edge_prediction *prediction = *likely_edges->begin ();
	    int p = prediction->ep_probability;
	    profile_probability prob
	      = profile_probability::from_reg_br_prob_base (p);

	    if (prediction->ep_edge == e)
	      e->probability = prob;
	    else if (unlikely_edges != NULL && unlikely_edges->contains (e))
	      e->probability = profile_probability::very_unlikely ();
	    else
	      {
		profile_probability remainder = prob.invert ();
		remainder -= profile_probability::very_unlikely ()
			       .apply_scale (unlikely_count, 1);
		int count = nedges - unlikely_count - 1;
		gcc_assert (count >= 0);

		e->probability = remainder.apply_scale (1, count);
	      }
	  }
	else
	  e->probability = profile_probability::never ();
    }
  else
    {
      /* Make all unlikely edges unlikely and the rest will have even
	 probability.  */
      unsigned scale = nedges - unlikely_count;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  ;
	else if (!unlikely_executed_edge_p (e))
	  {
	    if (unlikely_edges != NULL && unlikely_edges->contains (e))
	      e->probability = profile_probability::very_unlikely ();
	    else
	      e->probability = all.apply_scale (1, scale);
	  }
	else
	  e->probability = profile_probability::never ();
    }
}
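
/* Editorial example: for a block with three uninitialized successor
   edges, one of which is in UNLIKELY_EDGES and none likely, that edge
   gets profile_probability::very_unlikely () and the remaining
   probability mass is split evenly between the other two edges.  With a
   single edge in LIKELY_EDGES predicted at p, that edge gets p and the
   remaining 1 - p (minus any very-unlikely shares) is split evenly.  */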
/* Add REG_BR_PROB note to JUMP with PROB.  */

void
add_reg_br_prob_note (rtx_insn *jump, profile_probability prob)
{
  gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0));
  add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ());
}

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor
	    && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
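
  /* Editorial worked example of the combination rule above (not part of
     the original sources): starting from the 50% prior, folding in a 70%
     predictor gives d = 0.7*0.5 + 0.3*0.5 = 0.5 and combined
     = 0.5*0.7/0.5 = 70%; folding in a further 60% predictor gives
     d = 0.7*0.6 + 0.3*0.4 = 0.54 and combined = 0.42/0.54, roughly
     77.8%.  Two agreeing predictors thus reinforce each other beyond
     either one alone.  */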
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (best_predictor != END_PREDICTORS)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb);
  else
    {
      if (!first_match)
	dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
			 bb, !first_match ? REASON_NONE : REASON_IGNORED);
      else
	dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
			 bb, first_match ? REASON_NONE : REASON_IGNORED);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   (!first_match || best_predictor == predictor)
			   ? REASON_NONE : REASON_IGNORED);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      profile_probability p
	= profile_probability::from_reg_br_prob_base (combined_probability);
      add_reg_br_prob_note (insn, p);

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = p;
	  FALLTHRU_EDGE (bb)->probability
	    = BRANCH_EDGE (bb)->probability.invert ();
	}
    }
  else if (!single_succ_p (bb))
    {
      profile_probability prob = profile_probability::from_reg_br_prob_note
				   (XINT (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = prob.invert ();
    }
  else
    single_succ_edge (bb)->probability = profile_probability::always ();
}
/* Edge prediction hash traits.  */

struct predictor_hash : pointer_hash <edge_prediction>
{
  static inline hashval_t hash (const edge_prediction *);
  static inline bool equal (const edge_prediction *, const edge_prediction *);
};

/* Calculate hash value of an edge prediction P based on predictor and
   normalized probability.  */

inline hashval_t
predictor_hash::hash (const edge_prediction *p)
{
  inchash::hash hstate;
  hstate.add_int (p->ep_predictor);

  int prob = p->ep_probability;
  if (prob > REG_BR_PROB_BASE / 2)
    prob = REG_BR_PROB_BASE - prob;

  hstate.add_int (prob);

  return hstate.end ();
}

/* Return true if edge predictions P1 and P2 use the same predictor and
   have equal (or opposite) probabilities.  */

inline bool
predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2)
{
  return (p1->ep_predictor == p2->ep_predictor
	  && (p1->ep_probability == p2->ep_probability
	      || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability));
}
struct predictor_hash_traits : predictor_hash,
  typed_noop_remove <edge_prediction *> {};

/* Return true if edge prediction P is not in DATA hash set.  */

static bool
not_removed_prediction_p (edge_prediction *p, void *data)
{
  hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data;
  return !remove->contains (p);
}

/* Prune predictions for a basic block BB.  Currently we do the following
   clean-up steps:

   1) remove a predictor that guesses the same probability (other than
      1/2) for both edges
   2) remove duplicates of a prediction that attaches the same probability
      to a single edge.  */

static void
prune_predictions_for_bb (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);

  if (preds)
    {
      hash_table <predictor_hash_traits> s (13);
      hash_set <edge_prediction *> remove;

      /* Step 1: identify predictors that should be removed.  */
      for (edge_prediction *pred = *preds; pred; pred = pred->ep_next)
	{
	  edge_prediction *existing = s.find (pred);
	  if (existing)
	    {
	      if (pred->ep_edge == existing->ep_edge
		  && pred->ep_probability == existing->ep_probability)
		{
		  /* Remove a duplicate predictor.  */
		  dump_prediction (dump_file, pred->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge);

		  remove.add (pred);
		}
	      else if (pred->ep_edge != existing->ep_edge
		       && pred->ep_probability == existing->ep_probability
		       && pred->ep_probability != REG_BR_PROB_BASE / 2)
		{
		  /* Remove both predictors as they predict the same
		     for both edges.  */
		  dump_prediction (dump_file, existing->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_EDGE_PAIR_DUPLICATE,
				   existing->ep_edge);
		  dump_prediction (dump_file, pred->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_EDGE_PAIR_DUPLICATE,
				   pred->ep_edge);

		  remove.add (existing);
		  remove.add (pred);
		}
	    }

	  edge_prediction **slot2 = s.find_slot (pred, INSERT);
	  *slot2 = pred;
	}

      /* Step 2: Remove predictors.  */
      filter_predictions (preds, not_removed_prediction_p, &remove);
    }
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.
   If DRY_RUN is set, only produce dumps and do not modify profile.  */

static void
combine_predictions_for_bb (basic_block bb, bool dry_run)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  int nzero = 0;
  int nunknown = 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (!unlikely_executed_edge_p (e))
	{
	  nedges ++;
	  if (first && !second)
	    second = e;
	  if (!first)
	    first = e;
	}
      else if (!e->probability.initialized_p ())
	e->probability = profile_probability::never ();
      if (!e->probability.initialized_p ())
	nunknown++;
      else if (e->probability == profile_probability::never ())
	nzero++;
    }

  /* When there is no successor or only one choice, prediction is easy.

     When we have a basic block with more than 2 successors, the situation
     is more complicated as DS theory cannot be used literally.
     More precisely, let's assume we predicted edge e1 with probability p1,
     thus: m1({b1}) = p1.  As we're going to combine more than 2 edges, we
     need to find probability of e.g. m1({b2}), which we don't know.
     The only approximation is to equally distribute 1-p1 to all edges
     different from b1.

     According to numbers we've got from the SPEC2006 benchmark, there's only
     one interesting reliable predictor (noreturn call), which can be
     handled with a bit easier approach.  */
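
  /* Editorial example of the approximation above: with three successor
     edges and a single prediction p1 = 90% on e1, the remaining 10% is
     split evenly, giving e2 and e3 5% each; no DS-style combination
     across the three outcomes is attempted.  */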
  if (nedges != 2)
    {
      hash_set<edge> unlikely_edges (4);
      hash_set<edge_prediction *> likely_edges (4);

      /* Identify all edges that have a probability close to very unlikely.
	 Doing the same for very unlikely is not worth it, as there's no
	 such probability in the SPEC2006 benchmark.  */
      edge_prediction **preds = bb_predictions->get (bb);
      if (preds)
	for (pred = *preds; pred; pred = pred->ep_next)
	  {
	    if (pred->ep_probability <= PROB_VERY_UNLIKELY
		|| pred->ep_predictor == PRED_COLD_LABEL)
	      unlikely_edges.add (pred->ep_edge);
	    else if (pred->ep_probability >= PROB_VERY_LIKELY
		     || pred->ep_predictor == PRED_BUILTIN_EXPECT
		     || pred->ep_predictor == PRED_HOT_LABEL)
	      likely_edges.add (pred);
	  }

      /* It can happen that an edge is both in likely_edges and unlikely_edges.
	 Clear both sets in that situation.  */
      for (hash_set<edge_prediction *>::iterator it = likely_edges.begin ();
	   it != likely_edges.end (); ++it)
	if (unlikely_edges.contains ((*it)->ep_edge))
	  {
	    likely_edges.empty ();
	    unlikely_edges.empty ();
	    break;
	  }

      if (!dry_run)
	set_even_probabilities (bb, &unlikely_edges, &likely_edges);
      clear_bb_predictions (bb);
      if (dump_file)
	{
	  fprintf (dump_file, "Predictions for bb %i\n", bb->index);
	  if (unlikely_edges.is_empty ())
	    fprintf (dump_file,
		     "%i edges in bb %i predicted to even probabilities\n",
		     nedges, bb->index);
	  else
	    {
	      fprintf (dump_file,
		       "%i edges in bb %i predicted with some unlikely edges\n",
		       nedges, bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (!unlikely_executed_edge_p (e))
		  dump_prediction (dump_file, PRED_COMBINED,
		    e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e);
	    }
	}
      return;
    }
  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  prune_predictions_for_bb (bb);

  edge_prediction **preds = bb_predictions->get (bb);

  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor
	      && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds;
		   pred2; pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred2->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it!  */
		    if ((probability >= REG_BR_PROB_BASE / 2
			 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2
			    && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (best_predictor != END_PREDICTORS)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb);
  else
    {
      if (!first_match)
	dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
			 !first_match ? REASON_NONE : REASON_IGNORED);
      else
	dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
			 first_match ? REASON_NONE : REASON_IGNORED);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  dump_prediction (dump_file, predictor, probability, bb,
			   (!first_match || best_predictor == predictor)
			   ? REASON_NONE : REASON_IGNORED, pred->ep_edge);
	}
    }
  clear_bb_predictions (bb);

  /* If we have only one successor which is unknown, we can compute missing
     probability.  */
  if (nunknown == 1)
    {
      profile_probability prob = profile_probability::always ();
      edge missing = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  prob -= e->probability;
	else if (missing == NULL)
	  missing = e;
	else
	  gcc_unreachable ();
      missing->probability = prob;
    }
  /* If nothing is unknown, we have nothing to update.  */
  else if (!nunknown && nzero != (int) EDGE_COUNT (bb->succs))
    ;
  else if (!dry_run)
    {
      first->probability
	= profile_probability::from_reg_br_prob_base (combined_probability);
      second->probability = first->probability.invert ();
    }
}
/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition is satisfied, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (tree_fits_shwi_p (t1))
    value = tree_to_shwi (t1);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (tree_fits_shwi_p (t2))
    value = tree_to_shwi (t2);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}
/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}
/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gcond *stmt, class loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     tree *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  tree step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (tree_fits_shwi_p (iv1.step))
	step = iv1.step;
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (tree_fits_shwi_p (iv0.step))
	step = iv0.step;
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}
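
/* Editorial example (SSA names here are hypothetical): for a condition
   "i_4 < n_7" where i_4 is the induction variable {0, +, 1} of LOOP and
   n_7 is loop invariant, the function returns true with
   *LOOP_INVARIANT = n_7, *COMPARE_CODE = LT_EXPR, *LOOP_STEP = 1 and
   *LOOP_IV_BASE = 0.  */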
/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple *stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}
/* Return true if any outgoing edge of BB is predicted by one of the loop
   heuristics.  */

static bool
predicted_by_loop_heuristics_p (basic_block bb)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED
	|| i->ep_predictor == PRED_LOOP_ITERATIONS_MAX
	|| i->ep_predictor == PRED_LOOP_ITERATIONS
	|| i->ep_predictor == PRED_LOOP_EXIT
	|| i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION
	|| i->ep_predictor == PRED_LOOP_EXTRA_EXIT)
      return true;
  return false;
}
/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

   In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (class loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple *stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  tree compare_step_var;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_loop_heuristics_p (bb))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
					    loop, &compare_var,
					    &compare_code,
					    &compare_step_var,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (tree_fits_shwi_p (loop_bound_var)
      && tree_fits_shwi_p (compare_var)
      && tree_fits_shwi_p (compare_base))
    {
      int probability;
      wi::overflow_type overflow;
      bool overall_overflow = false;
      widest_int compare_count, tem;

      /* (loop_bound - base) / compare_step */
      tem = wi::sub (wi::to_widest (loop_bound_var),
		     wi::to_widest (compare_base), SIGNED, &overflow);
      overall_overflow |= overflow;
      widest_int loop_count = wi::div_trunc (tem,
					     wi::to_widest (compare_step_var),
					     SIGNED, &overflow);
      overall_overflow |= overflow;

      if (!wi::neg_p (wi::to_widest (compare_step_var))
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	{
	  /* (loop_bound - compare_bound) / compare_step */
	  tem = wi::sub (wi::to_widest (loop_bound_var),
			 wi::to_widest (compare_var), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      else
	{
	  /* (compare_bound - base) / compare_step */
	  tem = wi::sub (wi::to_widest (compare_var),
			 wi::to_widest (compare_base), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	++compare_count;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	++loop_count;
      if (wi::neg_p (compare_count))
	compare_count = 0;
      if (wi::neg_p (loop_count))
	loop_count = 0;
      if (loop_count == 0)
	probability = 0;
      else if (wi::cmps (compare_count, loop_count) == 1)
	probability = REG_BR_PROB_BASE;
      else
	{
	  tem = compare_count * REG_BR_PROB_BASE;
	  tem = wi::udiv_trunc (tem, loop_count);
	  probability = tem.to_uhwi ();
	}

      /* FIXME: The branch prediction seems broken.  It has only 20% hitrate.  */
      if (!overall_overflow)
	predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);

      return;
    }
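
  /* Editorial worked example for the constant case above: for
     "for (i = 0; i < 10; i++) if (i < 8) ..." we get loop_count
     = (10 - 0) / 1 == 10 and compare_count = (8 - 0) / 1 == 8, so the
     inner branch is predicted taken with probability
     8 * REG_BR_PROB_BASE / 10, i.e. 80%.  */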
  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	     * step < 0 : backedge condition is like (i > bound)
	     * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}
/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
   exits result from short-circuit conditions that will generate an
   if_tmp.  E.g.:

     if (foo() || global > 10)
       break;

   This will be translated into:

   BB3:
     loop header...
   BB4:
     if foo() goto BB6 else goto BB5
   BB5:
     if global > 10 goto BB6 else goto BB7
   BB6:
     goto BB7
   BB7:
     iftmp = (PHI 0(BB5), 1(BB6))
     if iftmp == 1 goto BB8 else goto BB3
   BB8:
     outside of the loop...

   The edge BB7->BB8 is a loop exit because BB8 is outside of the loop.
   From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
   exits.  This function takes BB7->BB8 as input, and finds out the extra loop
   exits to predict them using PRED_LOOP_EXTRA_EXIT.  */

static void
predict_extra_loop_exits (edge exit_edge)
{
  unsigned i;
  bool check_value_one;
  gimple *lhs_def_stmt;
  gphi *phi_stmt;
  tree cmp_rhs, cmp_lhs;
  gimple *last;
  gcond *cmp_stmt;

  last = last_stmt (exit_edge->src);
  if (!last)
    return;
  cmp_stmt = dyn_cast <gcond *> (last);
  if (!cmp_stmt)
    return;

  cmp_rhs = gimple_cond_rhs (cmp_stmt);
  cmp_lhs = gimple_cond_lhs (cmp_stmt);
  if (!TREE_CONSTANT (cmp_rhs)
      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
    return;
  if (TREE_CODE (cmp_lhs) != SSA_NAME)
    return;

  /* If check_value_one is true, only the phi_args with value '1' will lead
     to loop exit.  Otherwise, only the phi_args with value '0' will lead to
     loop exit.  */
  check_value_one = (((integer_onep (cmp_rhs))
		      ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
		     ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));

  lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
  if (!lhs_def_stmt)
    return;

  phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
  if (!phi_stmt)
    return;

  for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
    {
      edge e1;
      edge_iterator ei;
      tree val = gimple_phi_arg_def (phi_stmt, i);
      edge e = gimple_phi_arg_edge (phi_stmt, i);

      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
	continue;
      if ((check_value_one ^ integer_onep (val)) == 1)
	continue;
      if (EDGE_COUNT (e->src->succs) != 1)
	{
	  predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
	  continue;
	}

      FOR_EACH_EDGE (e1, ei, e->src->preds)
	predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
    }
}
1925 /* Predict edge probabilities by exploiting loop structure. */
1927 static void
1928 predict_loops (void)
1930 class loop *loop;
1931 basic_block bb;
1932 hash_set <class loop *> with_recursion(10);
1934 FOR_EACH_BB_FN (bb, cfun)
1936 gimple_stmt_iterator gsi;
1937 tree decl;
1939 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1940 if (is_gimple_call (gsi_stmt (gsi))
1941 && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
1942 && recursive_call_p (current_function_decl, decl))
1944 loop = bb->loop_father;
1945 while (loop && !with_recursion.add (loop))
1946 loop = loop_outer (loop);
1950 /* Try to predict out blocks in a loop that are not part of a
1951 natural loop. */
1952 for (auto loop : loops_list (cfun, LI_FROM_INNERMOST))
1954 basic_block bb, *bbs;
1955 unsigned j, n_exits = 0;
1956 class tree_niter_desc niter_desc;
1957 edge ex;
1958 class nb_iter_bound *nb_iter;
1959 enum tree_code loop_bound_code = ERROR_MARK;
1960 tree loop_bound_step = NULL;
1961 tree loop_bound_var = NULL;
1962 tree loop_iv_base = NULL;
1963 gcond *stmt = NULL;
1964 bool recursion = with_recursion.contains (loop);
1966 auto_vec<edge> exits = get_loop_exit_edges (loop);
1967 FOR_EACH_VEC_ELT (exits, j, ex)
1968 if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL))
1969 n_exits ++;
1970 if (!n_exits)
1971 continue;
1973 if (dump_file && (dump_flags & TDF_DETAILS))
1974 fprintf (dump_file, "Predicting loop %i%s with %i exits.\n",
1975 loop->num, recursion ? " (with recursion)":"", n_exits);
1976 if (dump_file && (dump_flags & TDF_DETAILS)
1977 && max_loop_iterations_int (loop) >= 0)
1979 fprintf (dump_file,
1980 "Loop %d iterates at most %i times.\n", loop->num,
1981 (int)max_loop_iterations_int (loop));
1983 if (dump_file && (dump_flags & TDF_DETAILS)
1984 && likely_max_loop_iterations_int (loop) >= 0)
1986 fprintf (dump_file, "Loop %d likely iterates at most %i times.\n",
1987 loop->num, (int)likely_max_loop_iterations_int (loop));
1990 FOR_EACH_VEC_ELT (exits, j, ex)
1992 tree niter = NULL;
1993 HOST_WIDE_INT nitercst;
1994 int max = param_max_predicted_iterations;
1995 int probability;
1996 enum br_predictor predictor;
1997 widest_int nit;
1999 if (unlikely_executed_edge_p (ex)
2000 || (ex->flags & EDGE_ABNORMAL_CALL))
2001 continue;
2002 /* Loop heuristics do not expect the exit conditional to be inside
2003 an inner loop. We predict from innermost to outermost loop. */
2004 if (predicted_by_loop_heuristics_p (ex->src))
2006 if (dump_file && (dump_flags & TDF_DETAILS))
2007 fprintf (dump_file, "Skipping exit %i->%i because "
2008 "it is already predicted.\n",
2009 ex->src->index, ex->dest->index);
2010 continue;
2012 predict_extra_loop_exits (ex);
2014 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
2015 niter = niter_desc.niter;
2016 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
2017 niter = loop_niter_by_eval (loop, ex);
2018 if (dump_file && (dump_flags & TDF_DETAILS)
2019 && TREE_CODE (niter) == INTEGER_CST)
2021 fprintf (dump_file, "Exit %i->%i %d iterates ",
2022 ex->src->index, ex->dest->index,
2023 loop->num);
2024 print_generic_expr (dump_file, niter, TDF_SLIM);
2025 fprintf (dump_file, " times.\n");
2028 if (TREE_CODE (niter) == INTEGER_CST)
2030 if (tree_fits_uhwi_p (niter)
2031 && max
2032 && compare_tree_int (niter, max - 1) == -1)
2033 nitercst = tree_to_uhwi (niter) + 1;
2034 else
2035 nitercst = max;
2036 predictor = PRED_LOOP_ITERATIONS;
2038 /* If we have just one exit and we can derive some information about
2039 the number of iterations of the loop from the statements inside
2040 the loop, use it to predict this exit. */
2041 else if (n_exits == 1
2042 && estimated_stmt_executions (loop, &nit))
2044 if (wi::gtu_p (nit, max))
2045 nitercst = max;
2046 else
2047 nitercst = nit.to_shwi ();
2048 predictor = PRED_LOOP_ITERATIONS_GUESSED;
2050 /* If we have a likely upper bound, trust it for very small iteration
2051 counts. Such loops would otherwise be mispredicted by the standard
2052 LOOP_EXIT heuristics. */
2053 else if (n_exits == 1
2054 && likely_max_stmt_executions (loop, &nit)
2055 && wi::ltu_p (nit,
2056 RDIV (REG_BR_PROB_BASE,
2057 REG_BR_PROB_BASE
2058 - predictor_info
2059 [recursion
2060 ? PRED_LOOP_EXIT_WITH_RECURSION
2061 : PRED_LOOP_EXIT].hitrate)))
2063 nitercst = nit.to_shwi ();
2064 predictor = PRED_LOOP_ITERATIONS_MAX;
2066 else
2068 if (dump_file && (dump_flags & TDF_DETAILS))
2069 fprintf (dump_file, "Nothing known about exit %i->%i.\n",
2070 ex->src->index, ex->dest->index);
2071 continue;
2074 if (dump_file && (dump_flags & TDF_DETAILS))
2075 fprintf (dump_file, "Recording prediction to %i iterations by %s.\n",
2076 (int)nitercst, predictor_info[predictor].name);
2077 /* If the prediction for the number of iterations is zero, do not
2078 predict the exit edges. */
2079 if (nitercst == 0)
2080 continue;
2082 probability = RDIV (REG_BR_PROB_BASE, nitercst);
2083 predict_edge (ex, predictor, probability);
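/* Worked example (illustrative, assuming REG_BR_PROB_BASE is 10000):
a loop known to iterate 100 times gives nitercst = 100, so the exit
edge above is predicted with probability RDIV (10000, 100) = 100,
i.e. a 1% chance of leaving the loop on each iteration.  */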
2086 /* Find information about loop bound variables. */
2087 for (nb_iter = loop->bounds; nb_iter;
2088 nb_iter = nb_iter->next)
2089 if (nb_iter->stmt
2090 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
2092 stmt = as_a <gcond *> (nb_iter->stmt);
2093 break;
2095 if (!stmt && last_stmt (loop->header)
2096 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
2097 stmt = as_a <gcond *> (last_stmt (loop->header));
2098 if (stmt)
2099 is_comparison_with_loop_invariant_p (stmt, loop,
2100 &loop_bound_var,
2101 &loop_bound_code,
2102 &loop_bound_step,
2103 &loop_iv_base);
2105 bbs = get_loop_body (loop);
2107 for (j = 0; j < loop->num_nodes; j++)
2109 edge e;
2110 edge_iterator ei;
2112 bb = bbs[j];
2114 /* Bypass loop heuristics on continue statements. These
2115 statements construct loops via "non-loop" constructs
2116 in the source language and are better handled
2117 separately. */
2118 if (predicted_by_p (bb, PRED_CONTINUE))
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2121 fprintf (dump_file, "BB %i predicted by continue.\n",
2122 bb->index);
2123 continue;
2126 /* If we already used more reliable loop exit predictors, do not
2127 bother with PRED_LOOP_EXIT. */
2128 if (!predicted_by_loop_heuristics_p (bb))
2130 /* For a loop with many exits we don't want to predict all exits
2131 with a fairly large probability, because if all exits are
2132 considered in a row, the loop would be predicted to iterate
2133 almost never. The code that divides the probability by the
2134 number of exits is very rough. It should compute the number of
2135 exits taken on each path through the function (not the overall
2136 number of exits, which might be a lot higher for loops with wide
2137 switch statements in them) and compute the n-th square root.
2139 We limit the minimal probability to 2% to keep
2140 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
2141 as this was causing a regression in the perl benchmark containing
2142 such a wide loop. */
2144 int probability = ((REG_BR_PROB_BASE
2145 - predictor_info
2146 [recursion
2147 ? PRED_LOOP_EXIT_WITH_RECURSION
2148 : PRED_LOOP_EXIT].hitrate)
2149 / n_exits);
2150 if (probability < HITRATE (2))
2151 probability = HITRATE (2);
2152 FOR_EACH_EDGE (e, ei, bb->succs)
2153 if (e->dest->index < NUM_FIXED_BLOCKS
2154 || !flow_bb_inside_loop_p (loop, e->dest))
2156 if (dump_file && (dump_flags & TDF_DETAILS))
2157 fprintf (dump_file,
2158 "Predicting exit %i->%i with prob %i.\n",
2159 e->src->index, e->dest->index, probability);
2160 predict_edge (e,
2161 recursion ? PRED_LOOP_EXIT_WITH_RECURSION
2162 : PRED_LOOP_EXIT, probability);
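/* Worked example (illustrative, assuming REG_BR_PROB_BASE is 10000
and a PRED_LOOP_EXIT hitrate of, say, 8900): with n_exits == 4
each exit edge gets (10000 - 8900) / 4 = 275; with 8 exits the
raw value 137 would be raised to the HITRATE (2) = 200 floor.  */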
2165 if (loop_bound_var)
2166 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
2167 loop_bound_code,
2168 tree_to_shwi (loop_bound_step));
2171 /* In the following code
2172 for (loop1)
2173 if (cond)
2174 for (loop2)
2175 body;
2176 guess that cond is unlikely. */
2177 if (loop_outer (loop)->num)
2179 basic_block bb = NULL;
2180 edge preheader_edge = loop_preheader_edge (loop);
2182 if (single_pred_p (preheader_edge->src)
2183 && single_succ_p (preheader_edge->src))
2184 preheader_edge = single_pred_edge (preheader_edge->src);
2186 gimple *stmt = last_stmt (preheader_edge->src);
2187 /* Pattern match the Fortran loop preheader:
2188 _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER);
2189 _17 = (logical(kind=4)) _16;
2190 if (_17 != 0)
2191 goto <bb 11>;
2192 else
2193 goto <bb 13>;
2195 Loop guard branch prediction says nothing about duplicated loop
2196 headers produced by the Fortran frontend, and in this case we want
2197 to predict paths leading to this preheader. */
2199 if (stmt
2200 && gimple_code (stmt) == GIMPLE_COND
2201 && gimple_cond_code (stmt) == NE_EXPR
2202 && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
2203 && integer_zerop (gimple_cond_rhs (stmt)))
2205 gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
2206 if (gimple_code (call_stmt) == GIMPLE_ASSIGN
2207 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (call_stmt))
2208 && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME)
2209 call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt));
2210 if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT)
2211 && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST
2212 && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2))
2213 && tree_to_uhwi (gimple_call_arg (call_stmt, 2))
2214 == PRED_FORTRAN_LOOP_PREHEADER)
2215 bb = preheader_edge->src;
2217 if (!bb)
2219 if (!dominated_by_p (CDI_DOMINATORS,
2220 loop_outer (loop)->latch, loop->header))
2221 predict_paths_leading_to_edge (loop_preheader_edge (loop),
2222 recursion
2223 ? PRED_LOOP_GUARD_WITH_RECURSION
2224 : PRED_LOOP_GUARD,
2225 NOT_TAKEN,
2226 loop_outer (loop));
2228 else
2230 if (!dominated_by_p (CDI_DOMINATORS,
2231 loop_outer (loop)->latch, bb))
2232 predict_paths_leading_to (bb,
2233 recursion
2234 ? PRED_LOOP_GUARD_WITH_RECURSION
2235 : PRED_LOOP_GUARD,
2236 NOT_TAKEN,
2237 loop_outer (loop));
2241 /* Free basic blocks from get_loop_body. */
2242 free (bbs);
2246 /* Attempt to predict probabilities of BB outgoing edges using local
2247 properties. */
2248 static void
2249 bb_estimate_probability_locally (basic_block bb)
2251 rtx_insn *last_insn = BB_END (bb);
2252 rtx cond;
2254 if (! can_predict_insn_p (last_insn))
2255 return;
2256 cond = get_condition (last_insn, NULL, false, false);
2257 if (! cond)
2258 return;
2260 /* Try "pointer heuristic."
2261 A comparison ptr == 0 is predicted as false.
2262 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2263 if (COMPARISON_P (cond)
2264 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
2265 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
2267 if (GET_CODE (cond) == EQ)
2268 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
2269 else if (GET_CODE (cond) == NE)
2270 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
2272 else
2274 /* Try "opcode heuristic."
2275 EQ tests are usually false and NE tests are usually true. Also,
2276 most quantities are positive, so we can make the appropriate guesses
2277 about signed comparisons against zero. */
2278 switch (GET_CODE (cond))
2280 case CONST_INT:
2281 /* Unconditional branch. */
2282 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
2283 cond == const0_rtx ? NOT_TAKEN : TAKEN);
2284 break;
2286 case EQ:
2287 case UNEQ:
2288 /* Floating point comparisons appear to behave in a very
2289 unpredictable way because of the special role of = tests in
2290 FP code. */
2291 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2293 /* Comparisons with 0 are often used for booleans and there is
2294 nothing useful to predict about them. */
2295 else if (XEXP (cond, 1) == const0_rtx
2296 || XEXP (cond, 0) == const0_rtx)
2298 else
2299 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
2300 break;
2302 case NE:
2303 case LTGT:
2304 /* Floating point comparisons appear to behave in a very
2305 unpredictable way because of the special role of = tests in
2306 FP code. */
2307 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2309 /* Comparisons with 0 are often used for booleans and there is
2310 nothing useful to predict about them. */
2311 else if (XEXP (cond, 1) == const0_rtx
2312 || XEXP (cond, 0) == const0_rtx)
2314 else
2315 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
2316 break;
2318 case ORDERED:
2319 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
2320 break;
2322 case UNORDERED:
2323 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
2324 break;
2326 case LE:
2327 case LT:
2328 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2329 || XEXP (cond, 1) == constm1_rtx)
2330 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
2331 break;
2333 case GE:
2334 case GT:
2335 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2336 || XEXP (cond, 1) == constm1_rtx)
2337 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
2338 break;
2340 default:
2341 break;
2345 /* Set edge->probability for each successor edge of BB. */
2346 void
2347 guess_outgoing_edge_probabilities (basic_block bb)
2349 bb_estimate_probability_locally (bb);
2350 combine_predictions_for_insn (BB_END (bb), bb);
2353 static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor,
2354 HOST_WIDE_INT *probability);
2356 /* Helper function for expr_expected_value. */
2358 static tree
2359 expr_expected_value_1 (tree type, tree op0, enum tree_code code,
2360 tree op1, bitmap visited, enum br_predictor *predictor,
2361 HOST_WIDE_INT *probability)
2363 gimple *def;
2365 /* Reset returned probability value. */
2366 *probability = -1;
2367 *predictor = PRED_UNCONDITIONAL;
2369 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
2371 if (TREE_CONSTANT (op0))
2372 return op0;
2374 if (code == IMAGPART_EXPR)
2376 if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME)
2378 def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0));
2379 if (is_gimple_call (def)
2380 && gimple_call_internal_p (def)
2381 && (gimple_call_internal_fn (def)
2382 == IFN_ATOMIC_COMPARE_EXCHANGE))
2384 /* Assume that any given atomic operation has low contention,
2385 and thus the compare-and-swap operation succeeds. */
2386 *predictor = PRED_COMPARE_AND_SWAP;
2387 return build_one_cst (TREE_TYPE (op0));
2392 if (code != SSA_NAME)
2393 return NULL_TREE;
2395 def = SSA_NAME_DEF_STMT (op0);
2397 /* If we were already here, break the infinite cycle. */
2398 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
2399 return NULL;
2401 if (gimple_code (def) == GIMPLE_PHI)
2403 /* All the arguments of the PHI node must have the same constant
2404 expected value. */
2405 int i, n = gimple_phi_num_args (def);
2406 tree val = NULL, new_val;
2408 for (i = 0; i < n; i++)
2410 tree arg = PHI_ARG_DEF (def, i);
2411 enum br_predictor predictor2;
2413 /* If this PHI has itself as an argument, we cannot
2414 determine the expected value of this argument. However,
2415 if we can find an expected constant value for the other
2416 PHI args then we can still be sure that this is
2417 likely a constant. So be optimistic and just
2418 continue with the next argument. */
2419 if (arg == PHI_RESULT (def))
2420 continue;
2422 HOST_WIDE_INT probability2;
2423 new_val = expr_expected_value (arg, visited, &predictor2,
2424 &probability2);
2426 /* It is difficult to combine value predictors. Simply assume
2427 that the later predictor is weaker and take its prediction. */
2428 if (*predictor < predictor2)
2430 *predictor = predictor2;
2431 *probability = probability2;
2433 if (!new_val)
2434 return NULL;
2435 if (!val)
2436 val = new_val;
2437 else if (!operand_equal_p (val, new_val, false))
2438 return NULL;
2440 return val;
2442 if (is_gimple_assign (def))
2444 if (gimple_assign_lhs (def) != op0)
2445 return NULL;
2447 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
2448 gimple_assign_rhs1 (def),
2449 gimple_assign_rhs_code (def),
2450 gimple_assign_rhs2 (def),
2451 visited, predictor, probability);
2454 if (is_gimple_call (def))
2456 tree decl = gimple_call_fndecl (def);
2457 if (!decl)
2459 if (gimple_call_internal_p (def)
2460 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
2462 gcc_assert (gimple_call_num_args (def) == 3);
2463 tree val = gimple_call_arg (def, 0);
2464 if (TREE_CONSTANT (val))
2465 return val;
2466 tree val2 = gimple_call_arg (def, 2);
2467 gcc_assert (TREE_CODE (val2) == INTEGER_CST
2468 && tree_fits_uhwi_p (val2)
2469 && tree_to_uhwi (val2) < END_PREDICTORS);
2470 *predictor = (enum br_predictor) tree_to_uhwi (val2);
2471 if (*predictor == PRED_BUILTIN_EXPECT)
2472 *probability
2473 = HITRATE (param_builtin_expect_probability);
2474 return gimple_call_arg (def, 1);
2476 return NULL;
2479 if (DECL_IS_MALLOC (decl) || DECL_IS_OPERATOR_NEW_P (decl))
2481 if (predictor)
2482 *predictor = PRED_MALLOC_NONNULL;
2483 return boolean_true_node;
2486 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2487 switch (DECL_FUNCTION_CODE (decl))
2489 case BUILT_IN_EXPECT:
2491 tree val;
2492 if (gimple_call_num_args (def) != 2)
2493 return NULL;
2494 val = gimple_call_arg (def, 0);
2495 if (TREE_CONSTANT (val))
2496 return val;
2497 *predictor = PRED_BUILTIN_EXPECT;
2498 *probability
2499 = HITRATE (param_builtin_expect_probability);
2500 return gimple_call_arg (def, 1);
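/* The BUILT_IN_EXPECT case above, illustrated: for
"if (__builtin_expect (x == 0, 1))" the first argument is not
constant, so the expected value 1 is returned, with
*predictor = PRED_BUILTIN_EXPECT and *probability taken from the
builtin-expect-probability param.  */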
2502 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2504 tree val;
2505 if (gimple_call_num_args (def) != 3)
2506 return NULL;
2507 val = gimple_call_arg (def, 0);
2508 if (TREE_CONSTANT (val))
2509 return val;
2510 /* Compute final probability as:
2511 probability * REG_BR_PROB_BASE. */
2512 tree prob = gimple_call_arg (def, 2);
2513 tree t = TREE_TYPE (prob);
2514 tree base = build_int_cst (integer_type_node,
2515 REG_BR_PROB_BASE);
2516 base = build_real_from_int_cst (t, base);
2517 tree r = fold_build2_initializer_loc (UNKNOWN_LOCATION,
2518 MULT_EXPR, t, prob, base);
2519 if (TREE_CODE (r) != REAL_CST)
2521 error_at (gimple_location (def),
2522 "probability %qE must be "
2523 "constant floating-point expression", prob);
2524 return NULL;
2526 HOST_WIDE_INT probi
2527 = real_to_integer (TREE_REAL_CST_PTR (r));
2528 if (probi >= 0 && probi <= REG_BR_PROB_BASE)
2530 *predictor = PRED_BUILTIN_EXPECT_WITH_PROBABILITY;
2531 *probability = probi;
2533 else
2534 error_at (gimple_location (def),
2535 "probability %qE is outside "
2536 "the range [0.0, 1.0]", prob);
2538 return gimple_call_arg (def, 1);
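/* The case above, illustrated (assuming REG_BR_PROB_BASE is 10000):
"__builtin_expect_with_probability (x, 1, 0.9)" folds 0.9 * 10000
to the real constant 9000, so probi = 9000 and the expected
value 1 is returned.  */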
2541 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
2542 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
2543 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
2544 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
2545 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
2546 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
2547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
2548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
2549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2551 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2552 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2553 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2554 /* Assume that any given atomic operation has low contention,
2555 and thus the compare-and-swap operation succeeds. */
2556 *predictor = PRED_COMPARE_AND_SWAP;
2557 return boolean_true_node;
2558 case BUILT_IN_REALLOC:
2559 if (predictor)
2560 *predictor = PRED_MALLOC_NONNULL;
2561 return boolean_true_node;
2562 default:
2563 break;
2567 return NULL;
2570 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
2572 tree res;
2573 enum br_predictor predictor2;
2574 HOST_WIDE_INT probability2;
2575 op0 = expr_expected_value (op0, visited, predictor, probability);
2576 if (!op0)
2577 return NULL;
2578 op1 = expr_expected_value (op1, visited, &predictor2, &probability2);
2579 if (!op1)
2580 return NULL;
2581 res = fold_build2 (code, type, op0, op1);
2582 if (TREE_CODE (res) == INTEGER_CST
2583 && TREE_CODE (op0) == INTEGER_CST
2584 && TREE_CODE (op1) == INTEGER_CST)
2586 /* Combine binary predictions. */
2587 if (*probability != -1 || probability2 != -1)
2589 HOST_WIDE_INT p1 = get_predictor_value (*predictor, *probability);
2590 HOST_WIDE_INT p2 = get_predictor_value (predictor2, probability2);
2591 *probability = RDIV (p1 * p2, REG_BR_PROB_BASE);
2594 if (*predictor < predictor2)
2595 *predictor = predictor2;
2597 return res;
2599 return NULL;
2601 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
2603 tree res;
2604 op0 = expr_expected_value (op0, visited, predictor, probability);
2605 if (!op0)
2606 return NULL;
2607 res = fold_build1 (code, type, op0);
2608 if (TREE_CONSTANT (res))
2609 return res;
2610 return NULL;
2612 return NULL;
2615 /* Return the constant EXPR is likely to have at execution time, or NULL
2616 if unknown. The function is used by the builtin_expect branch predictor,
2617 so the evidence must come from that construct and possible constant folding.
2619 We may want to implement a more involved value guess (such as value range
2620 propagation based prediction), but such tricks shall go into a new
2621 implementation. */
2623 static tree
2624 expr_expected_value (tree expr, bitmap visited,
2625 enum br_predictor *predictor,
2626 HOST_WIDE_INT *probability)
2628 enum tree_code code;
2629 tree op0, op1;
2631 if (TREE_CONSTANT (expr))
2633 *predictor = PRED_UNCONDITIONAL;
2634 *probability = -1;
2635 return expr;
2638 extract_ops_from_tree (expr, &code, &op0, &op1);
2639 return expr_expected_value_1 (TREE_TYPE (expr),
2640 op0, code, op1, visited, predictor,
2641 probability);
2645 /* Return the probability of a PREDICTOR. If the predictor has a variable
2646 probability, return the passed PROBABILITY. */
2648 static HOST_WIDE_INT
2649 get_predictor_value (br_predictor predictor, HOST_WIDE_INT probability)
2651 switch (predictor)
2653 case PRED_BUILTIN_EXPECT:
2654 case PRED_BUILTIN_EXPECT_WITH_PROBABILITY:
2655 gcc_assert (probability != -1);
2656 return probability;
2657 default:
2658 gcc_assert (probability == -1);
2659 return predictor_info[(int) predictor].hitrate;
2663 /* Predict using opcode of the last statement in basic block. */
2664 static void
2665 tree_predict_by_opcode (basic_block bb)
2667 gimple *stmt = last_stmt (bb);
2668 edge then_edge;
2669 tree op0, op1;
2670 tree type;
2671 tree val;
2672 enum tree_code cmp;
2673 edge_iterator ei;
2674 enum br_predictor predictor;
2675 HOST_WIDE_INT probability;
2677 if (!stmt)
2678 return;
2680 if (gswitch *sw = dyn_cast <gswitch *> (stmt))
2682 tree index = gimple_switch_index (sw);
2683 tree val = expr_expected_value (index, auto_bitmap (),
2684 &predictor, &probability);
2685 if (val && TREE_CODE (val) == INTEGER_CST)
2687 edge e = find_taken_edge_switch_expr (sw, val);
2688 if (predictor == PRED_BUILTIN_EXPECT)
2690 int percent = param_builtin_expect_probability;
2691 gcc_assert (percent >= 0 && percent <= 100);
2692 predict_edge (e, PRED_BUILTIN_EXPECT,
2693 HITRATE (percent));
2695 else
2696 predict_edge_def (e, predictor, TAKEN);
2700 if (gimple_code (stmt) != GIMPLE_COND)
2701 return;
2702 FOR_EACH_EDGE (then_edge, ei, bb->succs)
2703 if (then_edge->flags & EDGE_TRUE_VALUE)
2704 break;
2705 op0 = gimple_cond_lhs (stmt);
2706 op1 = gimple_cond_rhs (stmt);
2707 cmp = gimple_cond_code (stmt);
2708 type = TREE_TYPE (op0);
2709 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
2710 &predictor, &probability);
2711 if (val && TREE_CODE (val) == INTEGER_CST)
2713 HOST_WIDE_INT prob = get_predictor_value (predictor, probability);
2714 if (integer_zerop (val))
2715 prob = REG_BR_PROB_BASE - prob;
2716 predict_edge (then_edge, predictor, prob);
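/* Worked example (illustrative, assuming REG_BR_PROB_BASE is 10000 and
the builtin-expect-probability param at its default of 90): for
"if (__builtin_expect (x, 0))" the condition folds to 0, so the
then edge is predicted with 10000 - HITRATE (90) = 1000.  */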
2718 /* Try "pointer heuristic."
2719 A comparison ptr == 0 is predicted as false.
2720 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2721 if (POINTER_TYPE_P (type))
2723 if (cmp == EQ_EXPR)
2724 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
2725 else if (cmp == NE_EXPR)
2726 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
2728 else
2730 /* Try "opcode heuristic."
2731 EQ tests are usually false and NE tests are usually true. Also,
2732 most quantities are positive, so we can make the appropriate guesses
2733 about signed comparisons against zero. */
2734 switch (cmp)
2736 case EQ_EXPR:
2737 case UNEQ_EXPR:
2738 /* Floating point comparisons appear to behave in a very
2739 unpredictable way because of the special role of = tests in
2740 FP code. */
2741 if (FLOAT_TYPE_P (type))
2743 /* Comparisons with 0 are often used for booleans and there is
2744 nothing useful to predict about them. */
2745 else if (integer_zerop (op0) || integer_zerop (op1))
2747 else
2748 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2749 break;
2751 case NE_EXPR:
2752 case LTGT_EXPR:
2753 /* Floating point comparisons appear to behave in a very
2754 unpredictable way because of the special role of = tests in
2755 FP code. */
2756 if (FLOAT_TYPE_P (type))
2758 /* Comparisons with 0 are often used for booleans and there is
2759 nothing useful to predict about them. */
2760 else if (integer_zerop (op0)
2761 || integer_zerop (op1))
2763 else
2764 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2765 break;
2767 case ORDERED_EXPR:
2768 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2769 break;
2771 case UNORDERED_EXPR:
2772 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2773 break;
2775 case LE_EXPR:
2776 case LT_EXPR:
2777 if (integer_zerop (op1)
2778 || integer_onep (op1)
2779 || integer_all_onesp (op1)
2780 || real_zerop (op1)
2781 || real_onep (op1)
2782 || real_minus_onep (op1))
2783 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2784 break;
2786 case GE_EXPR:
2787 case GT_EXPR:
2788 if (integer_zerop (op1)
2789 || integer_onep (op1)
2790 || integer_all_onesp (op1)
2791 || real_zerop (op1)
2792 || real_onep (op1)
2793 || real_minus_onep (op1))
2794 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
2795 break;
2797 default:
2798 break;
2802 /* Returns TRUE if STMT is an exit(0)-like statement. */
2804 static bool
2805 is_exit_with_zero_arg (const gimple *stmt)
2807 /* This is not exit, _exit or _Exit. */
2808 if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT)
2809 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT)
2810 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2))
2811 return false;
2813 /* Argument is an integer zero. */
2814 return integer_zerop (gimple_call_arg (stmt, 0));
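/* Illustration for is_exit_with_zero_arg above: "exit (0)" matches;
"exit (1)" does not (non-zero argument), and "abort ()" does not
(not one of the three builtins checked).  */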
2817 /* Try to guess whether the return value VAL means an error code. */
2819 static enum br_predictor
2820 return_prediction (tree val, enum prediction *prediction)
2822 /* VOID. */
2823 if (!val)
2824 return PRED_NO_PREDICTION;
2825 /* Different heuristics for pointers and scalars. */
2826 if (POINTER_TYPE_P (TREE_TYPE (val)))
2828 /* NULL is usually not returned. */
2829 if (integer_zerop (val))
2831 *prediction = NOT_TAKEN;
2832 return PRED_NULL_RETURN;
2835 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2837 /* Negative return values are often used to indicate
2838 errors. */
2839 if (TREE_CODE (val) == INTEGER_CST
2840 && tree_int_cst_sgn (val) < 0)
2842 *prediction = NOT_TAKEN;
2843 return PRED_NEGATIVE_RETURN;
2845 /* Constant return values seem to be commonly taken.
2846 Zero/one often represent booleans, so exclude them from the
2847 heuristics. */
2848 if (TREE_CONSTANT (val)
2849 && (!integer_zerop (val) && !integer_onep (val)))
2851 *prediction = NOT_TAKEN;
2852 return PRED_CONST_RETURN;
2855 return PRED_NO_PREDICTION;
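/* Illustration of the heuristics above: "return NULL;" in a
pointer-returning function yields PRED_NULL_RETURN/NOT_TAKEN,
"return -1;" yields PRED_NEGATIVE_RETURN/NOT_TAKEN, "return 42;"
yields PRED_CONST_RETURN/NOT_TAKEN, while "return 0;" and
"return 1;" yield no prediction.  */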
2858 /* Return zero if phi result could have values other than -1, 0 or 1,
2859 otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1
2860 values are used or likely. */
2862 static int
2863 zero_one_minusone (gphi *phi, int limit)
2865 int phi_num_args = gimple_phi_num_args (phi);
2866 int ret = 0;
2867 for (int i = 0; i < phi_num_args; i++)
2869 tree t = PHI_ARG_DEF (phi, i);
2870 if (TREE_CODE (t) != INTEGER_CST)
2871 continue;
2872 wide_int w = wi::to_wide (t);
2873 if (w == -1)
2874 ret |= 1;
2875 else if (w == 0)
2876 ret |= 2;
2877 else if (w == 1)
2878 ret |= 4;
2879 else
2880 return 0;
2882 for (int i = 0; i < phi_num_args; i++)
2884 tree t = PHI_ARG_DEF (phi, i);
2885 if (TREE_CODE (t) == INTEGER_CST)
2886 continue;
2887 if (TREE_CODE (t) != SSA_NAME)
2888 return 0;
2889 gimple *g = SSA_NAME_DEF_STMT (t);
2890 if (gimple_code (g) == GIMPLE_PHI && limit > 0)
2891 if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1))
2893 ret |= r;
2894 continue;
2896 if (!is_gimple_assign (g))
2897 return 0;
2898 if (gimple_assign_cast_p (g))
2900 tree rhs1 = gimple_assign_rhs1 (g);
2901 if (TREE_CODE (rhs1) != SSA_NAME
2902 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2903 || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
2904 || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2905 return 0;
2906 ret |= (2 | 4);
2907 continue;
2909 if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison)
2910 return 0;
2911 ret |= (2 | 4);
2913 return ret;
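/* Illustration for zero_one_minusone above: a qsort-style comparator
whose phi arguments are the constants -1, 0 and 1 yields
1 | 2 | 4 = 7; a phi over 0, 1 and an SSA name defined by a
comparison yields 2 | 4 = 6; any other constant argument makes the
result 0.  */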
2916 /* Find the basic block with the return expression and look for a possible
2917 return value, trying to apply the RETURN_PREDICTION heuristics. */
2918 static void
2919 apply_return_prediction (void)
2921 greturn *return_stmt = NULL;
2922 tree return_val;
2923 edge e;
2924 gphi *phi;
2925 int phi_num_args, i;
2926 enum br_predictor pred;
2927 enum prediction direction;
2928 edge_iterator ei;
2930 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2932 gimple *last = last_stmt (e->src);
2933 if (last
2934 && gimple_code (last) == GIMPLE_RETURN)
2936 return_stmt = as_a <greturn *> (last);
2937 break;
2940 if (!e)
2941 return;
2942 return_val = gimple_return_retval (return_stmt);
2943 if (!return_val)
2944 return;
2945 if (TREE_CODE (return_val) != SSA_NAME
2946 || !SSA_NAME_DEF_STMT (return_val)
2947 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
2948 return;
2949 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
2950 phi_num_args = gimple_phi_num_args (phi);
2951 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2953 /* Avoid the case where the function returns -1, 0 and 1 values and
2954 nothing else. Those could be qsort etc. comparison functions
2955 where the negative return isn't less probable than the positive.
2956 For this, require that the function returns both -1 and 1 (where
2957 the 1 may come from a boolean value or comparison result), so that
2958 functions returning just -1 and 0 treat -1 as an error value. */
2959 if (INTEGRAL_TYPE_P (TREE_TYPE (return_val))
2960 && !TYPE_UNSIGNED (TREE_TYPE (return_val))
2961 && TYPE_PRECISION (TREE_TYPE (return_val)) > 1)
2962 if (int r = zero_one_minusone (phi, 3))
2963 if ((r & (1 | 4)) == (1 | 4))
2964 return;
2966 /* Avoid the degenerate case where all return values from the function
2967 belong to the same category (i.e. they are all positive constants),
2968 so we can hardly say anything about them. */
2969 for (i = 1; i < phi_num_args; i++)
2970 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2971 break;
2972 if (i != phi_num_args)
2973 for (i = 0; i < phi_num_args; i++)
2975 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2976 if (pred != PRED_NO_PREDICTION)
2977 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2978 direction);
2982 /* Look for basic blocks that contain unlikely-to-happen events
2983 (such as noreturn calls) and mark all paths leading to execution
2984 of these basic blocks as unlikely. */
2986 static void
2987 tree_bb_level_predictions (void)
2989 basic_block bb;
2990 bool has_return_edges = false;
2991 edge e;
2992 edge_iterator ei;
2994 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2995 if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL))
2997 has_return_edges = true;
2998 break;
3001 apply_return_prediction ();
3003 FOR_EACH_BB_FN (bb, cfun)
3005 gimple_stmt_iterator gsi;
3007 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3009 gimple *stmt = gsi_stmt (gsi);
3010 tree decl;
3012 if (is_gimple_call (stmt))
3014 if (gimple_call_noreturn_p (stmt)
3015 && has_return_edges
3016 && !is_exit_with_zero_arg (stmt))
3017 predict_paths_leading_to (bb, PRED_NORETURN,
3018 NOT_TAKEN);
3019 decl = gimple_call_fndecl (stmt);
3020 if (decl
3021 && lookup_attribute ("cold",
3022 DECL_ATTRIBUTES (decl)))
3023 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
3024 NOT_TAKEN);
3025 if (decl && recursive_call_p (current_function_decl, decl))
3026 predict_paths_leading_to (bb, PRED_RECURSIVE_CALL,
3027 NOT_TAKEN);
3029 else if (gimple_code (stmt) == GIMPLE_PREDICT)
3031 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
3032 gimple_predict_outcome (stmt));
3033 /* Keep GIMPLE_PREDICT around so early inlining will propagate
3034 hints to callers. */
3040 /* Callback for hash_map::traverse, asserts that the pointer map is
3041 empty. */
3043 bool
3044 assert_is_empty (const_basic_block const &, edge_prediction *const &value,
3045 void *)
3047 gcc_assert (!value);
3048 return false;
3051 /* Predict branch probabilities and estimate profile for basic block BB.
3052 When LOCAL_ONLY is set do not use any global properties of CFG. */
3054 static void
3055 tree_estimate_probability_bb (basic_block bb, bool local_only)
3057 edge e;
3058 edge_iterator ei;
3060 FOR_EACH_EDGE (e, ei, bb->succs)
3062 /* Look for a block we are guarding (i.e. we dominate it,
3063 but it doesn't postdominate us). */
3064 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
3065 && !local_only
3066 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
3067 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
3069 gimple_stmt_iterator bi;
3071 /* The call heuristic claims that a guarded function call
3072 is improbable. This is because such calls are often used
3073 to signal exceptional situations such as printing error
3074 messages. */
3075 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
3076 gsi_next (&bi))
3078 gimple *stmt = gsi_stmt (bi);
3079 if (is_gimple_call (stmt)
3080 && !gimple_inexpensive_call_p (as_a <gcall *> (stmt))
3081 /* Constant and pure calls are hardly used to signal
3082 something exceptional. */
3083 && gimple_has_side_effects (stmt))
3085 if (gimple_call_fndecl (stmt))
3086 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
3087 else if (virtual_method_call_p (gimple_call_fn (stmt)))
3088 predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN);
3089 else
3090 predict_edge_def (e, PRED_INDIR_CALL, TAKEN);
3091 break;
3096 tree_predict_by_opcode (bb);
3099 /* Predict branch probabilities and estimate profile of the tree CFG.
3100 This function can be called from the loop optimizers to recompute
3101 the profile information.
3102 If DRY_RUN is set, do not modify CFG and only produce dump files. */
3104 void
3105 tree_estimate_probability (bool dry_run)
3107 basic_block bb;
3109 connect_infinite_loops_to_exit ();
3110 /* We use loop_niter_by_eval, which requires that the loops have
3111 preheaders. */
3112 create_preheaders (CP_SIMPLE_PREHEADERS);
3113 calculate_dominance_info (CDI_POST_DOMINATORS);
3114 /* Decide which edges are known to be unlikely. This improves later
3115 branch prediction. */
3116 determine_unlikely_bbs ();
3118 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3119 tree_bb_level_predictions ();
3120 record_loop_exits ();
3122 if (number_of_loops (cfun) > 1)
3123 predict_loops ();
3125 FOR_EACH_BB_FN (bb, cfun)
3126 tree_estimate_probability_bb (bb, false);
3128 FOR_EACH_BB_FN (bb, cfun)
3129 combine_predictions_for_bb (bb, dry_run);
3131 if (flag_checking)
3132 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3134 delete bb_predictions;
3135 bb_predictions = NULL;
3137 if (!dry_run)
3138 estimate_bb_frequencies (false);
3139 free_dominance_info (CDI_POST_DOMINATORS);
3140 remove_fake_exit_edges ();
3143 /* Set edge->probability for each successor edge of BB. */
3144 void
3145 tree_guess_outgoing_edge_probabilities (basic_block bb)
3147 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3148 tree_estimate_probability_bb (bb, true);
3149 combine_predictions_for_bb (bb, false);
3150 if (flag_checking)
3151 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3152 delete bb_predictions;
3153 bb_predictions = NULL;
3156 /* Filter predicate that returns true for an edge prediction P unless
3157 its edge equals DATA and its predictor is PRED_LOOP_GUARD. */
3159 static bool
3160 not_loop_guard_equal_edge_p (edge_prediction *p, void *data)
3162 return p->ep_edge != (edge)data || p->ep_predictor != PRED_LOOP_GUARD;
3165 /* Predict edge E with PRED unless it is already predicted by some predictor
3166 considered equivalent. */
3168 static void
3169 maybe_predict_edge (edge e, enum br_predictor pred, enum prediction taken)
3171 if (edge_predicted_by_p (e, pred, taken))
3172 return;
3173 if (pred == PRED_LOOP_GUARD
3174 && edge_predicted_by_p (e, PRED_LOOP_GUARD_WITH_RECURSION, taken))
3175 return;
3176 /* Consider PRED_LOOP_GUARD_WITH_RECURSION superior to PRED_LOOP_GUARD. */
3177 if (pred == PRED_LOOP_GUARD_WITH_RECURSION)
3179 edge_prediction **preds = bb_predictions->get (e->src);
3180 if (preds)
3181 filter_predictions (preds, not_loop_guard_equal_edge_p, e);
3183 predict_edge_def (e, pred, taken);
3185 /* Predict, using PRED, the edges to successors of CUR whose sources are
3186 not postdominated by BB, and recurse to all postdominators. */
3188 static void
3189 predict_paths_for_bb (basic_block cur, basic_block bb,
3190 enum br_predictor pred,
3191 enum prediction taken,
3192 bitmap visited, class loop *in_loop = NULL)
3194 edge e;
3195 edge_iterator ei;
3196 basic_block son;
3198 /* If we exited the loop or CUR is unconditional in the loop, there is
3199 nothing to do. */
3200 if (in_loop
3201 && (!flow_bb_inside_loop_p (in_loop, cur)
3202 || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur)))
3203 return;
3205 /* We are looking for all edges forming the edge cut induced by the
3206 set of all blocks postdominated by BB. */
3207 FOR_EACH_EDGE (e, ei, cur->preds)
3208 if (e->src->index >= NUM_FIXED_BLOCKS
3209 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
3211 edge e2;
3212 edge_iterator ei2;
3213 bool found = false;
3215 /* Ignore fake edges and EH edges; we predict them as not taken anyway. */
3216 if (unlikely_executed_edge_p (e))
3217 continue;
3218 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
3220 /* See if there is an edge from e->src that is not abnormal
3221 and does not lead to BB and does not exit the loop. */
3222 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3223 if (e2 != e
3224 && !unlikely_executed_edge_p (e2)
3225 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)
3226 && (!in_loop || !loop_exit_edge_p (in_loop, e2)))
3228 found = true;
3229 break;
3232 /* If there is a non-abnormal path leaving e->src, predict the edge
3233 using the predictor. Otherwise we need to look for paths
3234 leading to e->src.
3236 The second case may lead to an infinite loop when we are predicting
3237 regions that are only reachable by abnormal edges. We simply
3238 prevent visiting a given BB twice. */
3239 if (found)
3240 maybe_predict_edge (e, pred, taken);
3241 else if (bitmap_set_bit (visited, e->src->index))
3242 predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop);
3244 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
3245 son;
3246 son = next_dom_son (CDI_POST_DOMINATORS, son))
3247 predict_paths_for_bb (son, bb, pred, taken, visited, in_loop);
3250 /* Set branch probabilities on all paths leading to BB according to
3251 PREDiction and TAKEN. */
3253 static void
3254 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
3255 enum prediction taken, class loop *in_loop)
3257 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3260 /* Like predict_paths_leading_to but take edge instead of basic block. */
3262 static void
3263 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
3264 enum prediction taken, class loop *in_loop)
3266 bool has_nonloop_edge = false;
3267 edge_iterator ei;
3268 edge e2;
3270 basic_block bb = e->src;
3271 FOR_EACH_EDGE (e2, ei, bb->succs)
3272 if (e2->dest != e->src && e2->dest != e->dest
3273 && !unlikely_executed_edge_p (e2)
3274 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
3276 has_nonloop_edge = true;
3277 break;
3280 if (!has_nonloop_edge)
3281 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3282 else
3283 maybe_predict_edge (e, pred, taken);
3286 /* This is used to carry information about basic blocks. It is
3287 attached to the AUX field of the standard CFG block. */
3289 class block_info
3291 public:
3292 /* Estimated frequency of execution of basic_block. */
3293 sreal frequency;
3295 /* To keep queue of basic blocks to process. */
3296 basic_block next;
3298 /* Number of predecessors we need to visit first. */
3299 int npredecessors;
3302 /* Similar information for edges. */
3303 class edge_prob_info
3305 public:
3306 /* If the edge is a loopback edge, the probability that the edge will be
3307 reached provided the header is. The estimated number of iterations of
3308 the loop can then be computed as 1 / (1 - back_edge_prob). */
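/* For example, a back_edge_prob of 0.9 estimates roughly
1 / (1 - 0.9) = 10 iterations.  */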
3309 sreal back_edge_prob;
3310 /* True if the edge is a loopback edge in the natural loop. */
3311 unsigned int back_edge:1;
3314 #define BLOCK_INFO(B) ((block_info *) (B)->aux)
3315 #undef EDGE_INFO
3316 #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
3318 /* Helper function for estimate_bb_frequencies.
3319 Propagate the frequencies in blocks marked in
3320 TOVISIT, starting in HEAD. */
3322 static void
3323 propagate_freq (basic_block head, bitmap tovisit,
3324 sreal max_cyclic_prob)
3326 basic_block bb;
3327 basic_block last;
3328 unsigned i;
3329 edge e;
3330 basic_block nextbb;
3331 bitmap_iterator bi;
3333 /* For each basic block we need to visit, count the number of its
3334 predecessors that we need to visit first. */
3335 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
3337 edge_iterator ei;
3338 int count = 0;
3340 bb = BASIC_BLOCK_FOR_FN (cfun, i);
3342 FOR_EACH_EDGE (e, ei, bb->preds)
3344 bool visit = bitmap_bit_p (tovisit, e->src->index);
3346 if (visit && !(e->flags & EDGE_DFS_BACK))
3347 count++;
3348 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
3349 fprintf (dump_file,
3350 "Irreducible region hit, ignoring edge to %i->%i\n",
3351 e->src->index, bb->index);
3353 BLOCK_INFO (bb)->npredecessors = count;
3354 /* When the function never returns, we will never process the exit block. */
3355 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3356 bb->count = profile_count::zero ();
3359 BLOCK_INFO (head)->frequency = 1;
3360 last = head;
3361 for (bb = head; bb; bb = nextbb)
3363 edge_iterator ei;
3364 sreal cyclic_probability = 0;
3365 sreal frequency = 0;
3367 nextbb = BLOCK_INFO (bb)->next;
3368 BLOCK_INFO (bb)->next = NULL;
3370 /* Compute frequency of basic block. */
3371 if (bb != head)
3373 if (flag_checking)
3374 FOR_EACH_EDGE (e, ei, bb->preds)
3375 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
3376 || (e->flags & EDGE_DFS_BACK));
3378 FOR_EACH_EDGE (e, ei, bb->preds)
3379 if (EDGE_INFO (e)->back_edge)
3380 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
3381 else if (!(e->flags & EDGE_DFS_BACK))
3383 /* FIXME: Graphite is producing edges with no profile. Once
3384 this is fixed, drop this. */
3385 sreal tmp = e->probability.initialized_p () ?
3386 e->probability.to_sreal () : 0;
3387 frequency += tmp * BLOCK_INFO (e->src)->frequency;
3390 if (cyclic_probability == 0)
3392 BLOCK_INFO (bb)->frequency = frequency;
3394 else
3396 if (cyclic_probability > max_cyclic_prob)
3398 if (dump_file)
3399 fprintf (dump_file,
3400 "cyclic probability of bb %i is %f (capped to %f)"
3401 "; turning freq %f",
3402 bb->index, cyclic_probability.to_double (),
3403 max_cyclic_prob.to_double (),
3404 frequency.to_double ());
3406 cyclic_probability = max_cyclic_prob;
3408 else if (dump_file)
3409 fprintf (dump_file,
3410 "cyclic probability of bb %i is %f; turning freq %f",
3411 bb->index, cyclic_probability.to_double (),
3412 frequency.to_double ());
3414 BLOCK_INFO (bb)->frequency = frequency
3415 / (sreal (1) - cyclic_probability);
3416 if (dump_file)
3417 fprintf (dump_file, " to %f\n",
3418 BLOCK_INFO (bb)->frequency.to_double ());
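/* Worked example for the formula above: a loop header entered with
frequency 1 whose back edge contributes cyclic_probability 0.75
gets frequency 1 / (1 - 0.75) = 4, matching an expected four
iterations.  */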
3422 bitmap_clear_bit (tovisit, bb->index);
3424 e = find_edge (bb, head);
3425 if (e)
3427 /* FIXME: Graphite is producing edges with no profile. Once
3428 this is fixed, drop this. */
3429 sreal tmp = e->probability.initialized_p () ?
3430 e->probability.to_sreal () : 0;
3431 EDGE_INFO (e)->back_edge_prob = tmp * BLOCK_INFO (bb)->frequency;
3434 /* Propagate to successor blocks. */
3435 FOR_EACH_EDGE (e, ei, bb->succs)
3436 if (!(e->flags & EDGE_DFS_BACK)
3437 && BLOCK_INFO (e->dest)->npredecessors)
3439 BLOCK_INFO (e->dest)->npredecessors--;
3440 if (!BLOCK_INFO (e->dest)->npredecessors)
3442 if (!nextbb)
3443 nextbb = e->dest;
3444 else
3445 BLOCK_INFO (last)->next = e->dest;
3447 last = e->dest;
3453 /* Estimate frequencies in loops at the same nest level. */
3455 static void
3456 estimate_loops_at_level (class loop *first_loop, sreal max_cyclic_prob)
3458 class loop *loop;
3460 for (loop = first_loop; loop; loop = loop->next)
3462 edge e;
3463 basic_block *bbs;
3464 unsigned i;
3465 auto_bitmap tovisit;
3467 estimate_loops_at_level (loop->inner, max_cyclic_prob);
3469 /* Find current loop back edge and mark it. */
3470 e = loop_latch_edge (loop);
3471 EDGE_INFO (e)->back_edge = 1;
3473 bbs = get_loop_body (loop);
3474 for (i = 0; i < loop->num_nodes; i++)
3475 bitmap_set_bit (tovisit, bbs[i]->index);
3476 free (bbs);
3477 propagate_freq (loop->header, tovisit, max_cyclic_prob);
3481 /* Propagates frequencies through the structure of loops. */
3483 static void
3484 estimate_loops (void)
3486 auto_bitmap tovisit;
3487 basic_block bb;
3488 sreal max_cyclic_prob = (sreal)1
3489 - (sreal)1 / (param_max_predicted_iterations + 1);
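/* Illustration, assuming param_max_predicted_iterations is at its
default of 100: max_cyclic_prob = 1 - 1/101, so cyclic
probabilities are capped to predict at most about 100 iterations
per loop.  */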
3491 /* Start by estimating the frequencies in the loops. */
3492 if (number_of_loops (cfun) > 1)
3493 estimate_loops_at_level (current_loops->tree_root->inner, max_cyclic_prob);
3495 /* Now propagate the frequencies through all the blocks. */
3496 FOR_ALL_BB_FN (bb, cfun)
3498 bitmap_set_bit (tovisit, bb->index);
3500 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit, max_cyclic_prob);
3503 /* Drop the profile for NODE to guessed, and update its frequency based on
3504 whether it is expected to be hot given the CALL_COUNT. */
3506 static void
3507 drop_profile (struct cgraph_node *node, profile_count call_count)
3509 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3510 /* In the case where this was called by another function with a
3511 dropped profile, call_count will be 0. Since there are no
3512 non-zero call counts to this function, we don't know for sure
3513 whether it is hot, and therefore it will be marked normal below. */
3514 bool hot = maybe_hot_count_p (NULL, call_count);
3516 if (dump_file)
3517 fprintf (dump_file,
3518 "Dropping 0 profile for %s. %s based on calls.\n",
3519 node->dump_name (),
3520 hot ? "Function is hot" : "Function is normal");
3521 /* We only expect to miss profiles for functions that are reached
3522 via non-zero call edges in cases where the function may have
3523 been linked from another module or library (COMDATs and extern
3524 templates). See the comments below for handle_missing_profiles.
3525 Also, only warn in cases where the missing counts exceed the
3526 number of training runs. In certain cases with an execv followed
3527 by a no-return call the profile for the no-return call is not
3528 dumped and there can be a mismatch. */
3529 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
3530 && call_count > profile_info->runs)
3532 if (flag_profile_correction)
3534 if (dump_file)
3535 fprintf (dump_file,
3536 "Missing counts for called function %s\n",
3537 node->dump_name ());
3539 else
3540 warning (0, "Missing counts for called function %s",
3541 node->dump_name ());
3544 basic_block bb;
3545 if (opt_for_fn (node->decl, flag_guess_branch_prob))
3547 bool clear_zeros
3548 = !ENTRY_BLOCK_PTR_FOR_FN (fn)->count.nonzero_p ();
3549 FOR_ALL_BB_FN (bb, fn)
3550 if (clear_zeros || !(bb->count == profile_count::zero ()))
3551 bb->count = bb->count.guessed_local ();
3552 fn->cfg->count_max = fn->cfg->count_max.guessed_local ();
3554 else
3556 FOR_ALL_BB_FN (bb, fn)
3557 bb->count = profile_count::uninitialized ();
3558 fn->cfg->count_max = profile_count::uninitialized ();
3561 struct cgraph_edge *e;
3562 for (e = node->callees; e; e = e->next_callee)
3563 e->count = gimple_bb (e->call_stmt)->count;
3564 for (e = node->indirect_calls; e; e = e->next_callee)
3565 e->count = gimple_bb (e->call_stmt)->count;
3566 node->count = ENTRY_BLOCK_PTR_FOR_FN (fn)->count;
3568 profile_status_for_fn (fn)
3569 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
3570 node->frequency
3571 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
3574 /* In the case of COMDAT routines, multiple object files will contain the same
3575 function and the linker will select one for the binary. In that case
3576 all the other copies from the profile-instrumented binary will be missing
3577 profile counts. Look for cases where this happened, due to non-zero
3578 call counts going to 0-count functions, and drop the profile to guessed
3579 so that we can use the estimated probabilities and avoid optimizing only
3580 for size.
3582 The other case where the profile may be missing is when the routine
3583 is not going to be emitted to the object file, e.g. for "extern template"
3584 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
3585 all other cases of non-zero calls to 0-count functions. */
3587 void
3588 handle_missing_profiles (void)
3590 const int unlikely_frac = param_unlikely_bb_count_fraction;
3591 struct cgraph_node *node;
3592 auto_vec<struct cgraph_node *, 64> worklist;
3594 /* See if a 0-count function has non-0-count callers. In this case we
3595 lost some profile. Drop its function profile to PROFILE_GUESSED. */
3596 FOR_EACH_DEFINED_FUNCTION (node)
3598 struct cgraph_edge *e;
3599 profile_count call_count = profile_count::zero ();
3600 gcov_type max_tp_first_run = 0;
3601 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3603 if (node->count.ipa ().nonzero_p ())
3604 continue;
3605 for (e = node->callers; e; e = e->next_caller)
3606 if (e->count.ipa ().initialized_p () && e->count.ipa () > 0)
3608 call_count = call_count + e->count.ipa ();
3610 if (e->caller->tp_first_run > max_tp_first_run)
3611 max_tp_first_run = e->caller->tp_first_run;
3614 /* If the time profile is missing, assign the maximum that comes from
3615 the caller functions. */
3616 if (!node->tp_first_run && max_tp_first_run)
3617 node->tp_first_run = max_tp_first_run + 1;
3619 if (call_count > 0
3620 && fn && fn->cfg
3621 && call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
3623 drop_profile (node, call_count);
3624 worklist.safe_push (node);
3628 /* Propagate the profile dropping to other 0-count COMDATs that are
3629 potentially called by COMDATs we already dropped the profile on. */
3630 while (worklist.length () > 0)
3632 struct cgraph_edge *e;
3634 node = worklist.pop ();
3635 for (e = node->callees; e; e = e->next_callee)
3637 struct cgraph_node *callee = e->callee;
3638 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
3640 if (!(e->count.ipa () == profile_count::zero ())
3641 && callee->count.ipa ().nonzero_p ())
3642 continue;
3643 if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl))
3644 && fn && fn->cfg
3645 && profile_status_for_fn (fn) == PROFILE_READ)
3647 drop_profile (node, profile_count::zero ());
3648 worklist.safe_push (callee);
3654 /* Update the count_max field of cfun->cfg from the basic block counts.
3655 Return true iff there was any nonzero IPA execution count. */
3657 bool
3658 update_max_bb_count (void)
3660 profile_count true_count_max = profile_count::uninitialized ();
3661 basic_block bb;
3663 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3664 true_count_max = true_count_max.max (bb->count);
3666 cfun->cfg->count_max = true_count_max;
3668 return true_count_max.ipa ().nonzero_p ();
3671 /* Return true if the function is likely to be expensive, so there is no
3672 point in optimizing the prologue or epilogue or doing inlining at the
3673 expense of code size growth. THRESHOLD limits the number of instructions
3674 the function can execute on average while still being considered cheap. */
3676 bool
3677 expensive_function_p (int threshold)
3679 basic_block bb;
3681 /* If the profile was scaled so that the entry block has count 0, then the
3682 function is definitely taking a lot of time. */
3683 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
3684 return true;
3686 profile_count limit = ENTRY_BLOCK_PTR_FOR_FN
3687 (cfun)->count.apply_scale (threshold, 1);
3688 profile_count sum = profile_count::zero ();
3689 FOR_EACH_BB_FN (bb, cfun)
3691 rtx_insn *insn;
3693 if (!bb->count.initialized_p ())
3695 if (dump_file)
3696 fprintf (dump_file, "Function is considered expensive because"
3697 " count of bb %i is not initialized\n", bb->index);
3698 return true;
3701 FOR_BB_INSNS (bb, insn)
3702 if (active_insn_p (insn))
3704 sum += bb->count;
3705 if (sum > limit)
3706 return true;
3710 return false;
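/* Worked example for expensive_function_p above: with THRESHOLD 50
and an entry count of 1000, limit is 50000; a block with count
30000 containing two active insns contributes 60000 > limit, so
the function is reported as expensive.  */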
3713 /* All basic blocks that are reachable only from unlikely basic blocks are
3714 unlikely. */
3716 void
3717 propagate_unlikely_bbs_forward (void)
3719 auto_vec<basic_block, 64> worklist;
3720 basic_block bb;
3721 edge_iterator ei;
3722 edge e;
3724 if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()))
3726 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1;
3727 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3729 while (worklist.length () > 0)
3731 bb = worklist.pop ();
3732 FOR_EACH_EDGE (e, ei, bb->succs)
3733 if (!(e->count () == profile_count::zero ())
3734 && !(e->dest->count == profile_count::zero ())
3735 && !e->dest->aux)
3737 e->dest->aux = (void *)(size_t) 1;
3738 worklist.safe_push (e->dest);
3743 FOR_ALL_BB_FN (bb, cfun)
3745 if (!bb->aux)
3747 if (!(bb->count == profile_count::zero ())
3748 && (dump_file && (dump_flags & TDF_DETAILS)))
3749 fprintf (dump_file,
3750 "Basic block %i is marked unlikely by forward prop\n",
3751 bb->index);
3752 bb->count = profile_count::zero ();
3754 else
3755 bb->aux = NULL;
3759 /* Determine basic blocks/edges that are known to be unlikely executed and set
3760 their counters to zero.
3761 This is done by first identifying obviously unlikely BBs/edges and then
3762 propagating in both directions. */
3764 static void
3765 determine_unlikely_bbs ()
3767 basic_block bb;
3768 auto_vec<basic_block, 64> worklist;
3769 edge_iterator ei;
3770 edge e;
3772 FOR_EACH_BB_FN (bb, cfun)
3774 if (!(bb->count == profile_count::zero ())
3775 && unlikely_executed_bb_p (bb))
3777 if (dump_file && (dump_flags & TDF_DETAILS))
3778 fprintf (dump_file, "Basic block %i is locally unlikely\n",
3779 bb->index);
3780 bb->count = profile_count::zero ();
3783 FOR_EACH_EDGE (e, ei, bb->succs)
3784 if (!(e->probability == profile_probability::never ())
3785 && unlikely_executed_edge_p (e))
3787 if (dump_file && (dump_flags & TDF_DETAILS))
3788 fprintf (dump_file, "Edge %i->%i is locally unlikely\n",
3789 bb->index, e->dest->index);
3790 e->probability = profile_probability::never ();
3793 gcc_checking_assert (!bb->aux);
3795 propagate_unlikely_bbs_forward ();
3797 auto_vec<int, 64> nsuccs;
3798 nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3799 FOR_ALL_BB_FN (bb, cfun)
3800 if (!(bb->count == profile_count::zero ())
3801 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3803 nsuccs[bb->index] = 0;
3804 FOR_EACH_EDGE (e, ei, bb->succs)
3805 if (!(e->probability == profile_probability::never ())
3806 && !(e->dest->count == profile_count::zero ()))
3807 nsuccs[bb->index]++;
3808 if (!nsuccs[bb->index])
3809 worklist.safe_push (bb);
3811 while (worklist.length () > 0)
3813 bb = worklist.pop ();
3814 if (bb->count == profile_count::zero ())
3815 continue;
3816 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
3818 bool found = false;
3819 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3820 !gsi_end_p (gsi); gsi_next (&gsi))
3821 if (stmt_can_terminate_bb_p (gsi_stmt (gsi))
3822 /* stmt_can_terminate_bb_p special-cases noreturn calls because it
3823 assumes that fake edges are created. We want a noreturn call
3824 alone not to imply that the BB is unlikely. */
3825 || (is_gimple_call (gsi_stmt (gsi))
3826 && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN)))
3828 found = true;
3829 break;
3831 if (found)
3832 continue;
3834 if (dump_file && (dump_flags & TDF_DETAILS))
3835 fprintf (dump_file,
3836 "Basic block %i is marked unlikely by backward prop\n",
3837 bb->index);
3838 bb->count = profile_count::zero ();
3839 FOR_EACH_EDGE (e, ei, bb->preds)
3840 if (!(e->probability == profile_probability::never ()))
3842 if (!(e->src->count == profile_count::zero ()))
3844 gcc_checking_assert (nsuccs[e->src->index] > 0);
3845 nsuccs[e->src->index]--;
3846 if (!nsuccs[e->src->index])
3847 worklist.safe_push (e->src);
3851 /* Finally all edges from non-0 regions to 0 are unlikely. */
3852 FOR_ALL_BB_FN (bb, cfun)
3854 if (!(bb->count == profile_count::zero ()))
3855 FOR_EACH_EDGE (e, ei, bb->succs)
3856 if (!(e->probability == profile_probability::never ())
3857 && e->dest->count == profile_count::zero ())
3859 if (dump_file && (dump_flags & TDF_DETAILS))
3860 fprintf (dump_file, "Edge %i->%i is unlikely because "
3861 "it enters unlikely block\n",
3862 bb->index, e->dest->index);
3863 e->probability = profile_probability::never ();
3866 edge other = NULL;
3868 FOR_EACH_EDGE (e, ei, bb->succs)
3869 if (e->probability == profile_probability::never ())
3871 else if (other)
3873 other = NULL;
3874 break;
3876 else
3877 other = e;
3878 if (other
3879 && !(other->probability == profile_probability::always ()))
3881 if (dump_file && (dump_flags & TDF_DETAILS))
3882 fprintf (dump_file, "Edge %i->%i is locally likely\n",
3883 bb->index, other->dest->index);
3884 other->probability = profile_probability::always ();
3887 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
3888 cgraph_node::get (current_function_decl)->count = profile_count::zero ();
3891 /* Estimate and propagate basic block frequencies using the given branch
3892 probabilities. If FORCE is true, the frequencies are used to estimate
3893 the counts even when there are already non-zero profile counts. */
3895 void
3896 estimate_bb_frequencies (bool force)
3898 basic_block bb;
3899 sreal freq_max;
3901 determine_unlikely_bbs ();
3903 if (force || profile_status_for_fn (cfun) != PROFILE_READ
3904 || !update_max_bb_count ())
3907 mark_dfs_back_edges ();
3909 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
3910 profile_probability::always ();
3912 /* Set up block info for each basic block. */
3913 alloc_aux_for_blocks (sizeof (block_info));
3914 alloc_aux_for_edges (sizeof (edge_prob_info));
3915 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3917 edge e;
3918 edge_iterator ei;
3920 FOR_EACH_EDGE (e, ei, bb->succs)
3922 /* FIXME: Graphite is producing edges with no profile. Once
3923 this is fixed, drop this. */
3924 if (e->probability.initialized_p ())
3925 EDGE_INFO (e)->back_edge_prob
3926 = e->probability.to_sreal ();
3927 else
3928 /* back_edge_prob = 0.5 */
3929 EDGE_INFO (e)->back_edge_prob = sreal (1, -1);
3933 /* First compute frequencies locally for each loop from innermost
3934 to outermost to examine frequencies for back edges. */
3935 estimate_loops ();
3937 freq_max = 0;
3938 FOR_EACH_BB_FN (bb, cfun)
3939 if (freq_max < BLOCK_INFO (bb)->frequency)
3940 freq_max = BLOCK_INFO (bb)->frequency;
3942 /* Scaling frequencies up to maximal profile count may result in
3943 frequent overflows especially when inlining loops.
3944 Small scaling results in unnecessary precision loss. Stay in
3945 the half of the (exponential) range. */
3946 freq_max = (sreal (1) << (profile_count::n_bits / 2)) / freq_max;
3947 if (freq_max < 16)
3948 freq_max = 16;
3949 profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ();
3950 cfun->cfg->count_max = profile_count::uninitialized ();
3951 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3953 sreal tmp = BLOCK_INFO (bb)->frequency;
3954 if (tmp >= 1)
3956 gimple_stmt_iterator gsi;
3957 tree decl;
3959 /* Self-recursive calls cannot have a frequency greater than 1,
3960 or the program would never terminate. This will result in an
3961 inconsistent bb profile but it is better than greatly confusing
3962 IPA cost metrics. */
3963 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3964 if (is_gimple_call (gsi_stmt (gsi))
3965 && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
3966 && recursive_call_p (current_function_decl, decl))
3968 if (dump_file)
3969 fprintf (dump_file, "Dropping frequency of recursive call"
3970 " in bb %i from %f\n", bb->index,
3971 tmp.to_double ());
3972 tmp = (sreal)9 / (sreal)10;
3973 break;
3976 tmp = tmp * freq_max + sreal (1, -1);
3977 profile_count count = profile_count::from_gcov_type (tmp.to_int ());
3979 /* If we have profile feedback in which this function was never
3980 executed, then preserve this info. */
3981 if (!(bb->count == profile_count::zero ()))
3982 bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count);
3983 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
3986 free_aux_for_blocks ();
3987 free_aux_for_edges ();
3989 compute_function_frequency ();
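/* Example: the scaling step above in isolation, with plain doubles in
   place of sreal and an assumed value for the count bit-width.  The
   largest frequency is mapped to the middle of the exponential range,
   with a floor of 16, and each frequency is then rounded to an integer
   count.  */

#include <cstdint>
#include <cstdio>
#include <algorithm>

int
main ()
{
  const int n_bits = 61;			/* assumed count width */
  double freqs[] = { 1.0, 35.5, 0.02 };		/* estimated frequencies */

  double freq_max = 0;
  for (double f : freqs)
    freq_max = std::max (freq_max, f);

  double scale = (double) ((int64_t) 1 << (n_bits / 2)) / freq_max;
  if (scale < 16)
    scale = 16;

  for (double f : freqs)
    printf ("frequency %g -> count %lld\n",
	    f, (long long) (f * scale + 0.5));	/* +0.5 rounds */
}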
3992 /* Decide whether function is hot, cold or unlikely executed. */
3993 void
3994 compute_function_frequency (void)
3996 basic_block bb;
3997 struct cgraph_node *node = cgraph_node::get (current_function_decl);
3999 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4000 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
4001 node->only_called_at_startup = true;
4002 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
4003 node->only_called_at_exit = true;
4005 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p ())
4007 int flags = flags_from_decl_or_type (current_function_decl);
4008 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
4009 != NULL)
4010 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
4011 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
4012 != NULL)
4013 node->frequency = NODE_FREQUENCY_HOT;
4014 else if (flags & ECF_NORETURN)
4015 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4016 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
4017 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4018 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4019 || DECL_STATIC_DESTRUCTOR (current_function_decl))
4020 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4021 return;
4024 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
4025 warn_function_cold (current_function_decl);
4026 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ())
4027 return;
4028 FOR_EACH_BB_FN (bb, cfun)
4030 if (maybe_hot_bb_p (cfun, bb))
4032 node->frequency = NODE_FREQUENCY_HOT;
4033 return;
4035 if (!probably_never_executed_bb_p (cfun, bb))
4036 node->frequency = NODE_FREQUENCY_NORMAL;
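/* Example: the no-profile part of the classification above, reduced to
   plain booleans (a sketch, not the cgraph API).  The order matters:
   "cold" beats "hot", and both beat the executed-once heuristics for
   noreturn, main and static constructors/destructors.  */

#include <cstdio>

enum node_frequency { UNLIKELY_EXECUTED, EXECUTED_ONCE, NORMAL, HOT };

static node_frequency
classify_without_profile (bool cold_attr, bool hot_attr, bool noreturn_p,
			  bool main_p, bool static_cdtor_p)
{
  if (cold_attr)
    return UNLIKELY_EXECUTED;
  if (hot_attr)
    return HOT;
  if (noreturn_p || main_p || static_cdtor_p)
    return EXECUTED_ONCE;
  return NORMAL;
}

int
main ()
{
  /* A noreturn function without attributes is "executed once".  */
  printf ("%d\n", classify_without_profile (false, false, true,
					    false, false));
}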
4040 /* Build PREDICT_EXPR. */
4041 tree
4042 build_predict_expr (enum br_predictor predictor, enum prediction taken)
4044 tree t = build1 (PREDICT_EXPR, void_type_node,
4045 build_int_cst (integer_type_node, predictor));
4046 SET_PREDICT_EXPR_OUTCOME (t, taken);
4047 return t;
4050 const char *
4051 predictor_name (enum br_predictor predictor)
4053 return predictor_info[predictor].name;
4056 /* Predict branch probabilities and estimate profile of the tree CFG. */
4058 namespace {
4060 const pass_data pass_data_profile =
4062 GIMPLE_PASS, /* type */
4063 "profile_estimate", /* name */
4064 OPTGROUP_NONE, /* optinfo_flags */
4065 TV_BRANCH_PROB, /* tv_id */
4066 PROP_cfg, /* properties_required */
4067 0, /* properties_provided */
4068 0, /* properties_destroyed */
4069 0, /* todo_flags_start */
4070 0, /* todo_flags_finish */
4073 class pass_profile : public gimple_opt_pass
4075 public:
4076 pass_profile (gcc::context *ctxt)
4077 : gimple_opt_pass (pass_data_profile, ctxt)
4080 /* opt_pass methods: */
4081 virtual bool gate (function *) { return flag_guess_branch_prob; }
4082 virtual unsigned int execute (function *);
4084 }; // class pass_profile
4086 unsigned int
4087 pass_profile::execute (function *fun)
4089 unsigned nb_loops;
4091 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4092 return 0;
4094 loop_optimizer_init (LOOPS_NORMAL);
4095 if (dump_file && (dump_flags & TDF_DETAILS))
4096 flow_loops_dump (dump_file, NULL, 0);
4098 nb_loops = number_of_loops (fun);
4099 if (nb_loops > 1)
4100 scev_initialize ();
4102 tree_estimate_probability (false);
4104 if (nb_loops > 1)
4105 scev_finalize ();
4107 loop_optimizer_finalize ();
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4109 gimple_dump_cfg (dump_file, dump_flags);
4110 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
4111 profile_status_for_fn (fun) = PROFILE_GUESSED;
4112 if (dump_file && (dump_flags & TDF_DETAILS))
4114 for (auto loop : loops_list (cfun, LI_FROM_INNERMOST))
4115 if (loop->header->count.initialized_p ())
4116 fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
4117 loop->num,
4118 (int)expected_loop_iterations_unbounded (loop));
4120 return 0;
4123 } // anon namespace
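/* Example: one way to read the "predicted to iterate" numbers dumped
   above.  Assuming the usual geometric model, a back edge predicted
   taken with probability p yields 1 / (1 - p) expected executions of
   the loop header per entry into the loop.  */

#include <cstdio>

int
main ()
{
  const double back_edge_prob[] = { 0.5, 0.9, 0.99 };
  for (double p : back_edge_prob)
    printf ("back edge probability %.2f -> ~%.0f expected iterations\n",
	    p, 1.0 / (1.0 - p));	/* 2, 10 and 100 */
}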
4125 gimple_opt_pass *
4126 make_pass_profile (gcc::context *ctxt)
4128 return new pass_profile (ctxt);
4131 /* Return true when predictor PRED should be removed after early
4132 tree passes. Most predictors are beneficial to keep, since early
4133 inlining can also distribute them into callers' bodies. */
4135 static bool
4136 strip_predictor_early (enum br_predictor pred)
4138 switch (pred)
4140 case PRED_TREE_EARLY_RETURN:
4141 return true;
4142 default:
4143 return false;
4147 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
4148 we no longer need. EARLY is set to true when called from early
4149 optimizations. */
4151 unsigned int
4152 strip_predict_hints (function *fun, bool early)
4154 basic_block bb;
4155 gimple *ass_stmt;
4156 tree var;
4157 bool changed = false;
4159 FOR_EACH_BB_FN (bb, fun)
4161 gimple_stmt_iterator bi;
4162 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
4164 gimple *stmt = gsi_stmt (bi);
4166 if (gimple_code (stmt) == GIMPLE_PREDICT)
4168 if (!early
4169 || strip_predictor_early (gimple_predict_predictor (stmt)))
4171 gsi_remove (&bi, true);
4172 changed = true;
4173 continue;
4176 else if (is_gimple_call (stmt))
4178 tree fndecl = gimple_call_fndecl (stmt);
4180 if (!early
4181 && ((fndecl != NULL_TREE
4182 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
4183 && gimple_call_num_args (stmt) == 2)
4184 || (fndecl != NULL_TREE
4185 && fndecl_built_in_p (fndecl,
4186 BUILT_IN_EXPECT_WITH_PROBABILITY)
4187 && gimple_call_num_args (stmt) == 3)
4188 || (gimple_call_internal_p (stmt)
4189 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)))
4191 var = gimple_call_lhs (stmt);
4192 changed = true;
4193 if (var)
4195 ass_stmt
4196 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
4197 gsi_replace (&bi, ass_stmt, true);
4199 else
4201 gsi_remove (&bi, true);
4202 continue;
4206 gsi_next (&bi);
4209 return changed ? TODO_cleanup_cfg : 0;
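/* Example: the effect of the rewrite above, seen from the source level
   when compiling with GCC.  Roughly, the GIMPLE call
   _2 = __builtin_expect (_1, 1) becomes the plain copy _2 = _1 once the
   hint has been consumed, and an expect call without an lhs is removed,
   so the test below degenerates to "if (v > 0)".  */

long
likely_positive (long v)
{
  if (__builtin_expect (v > 0, 1))	/* hint: condition is likely */
    return v;
  return 0;
}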
4212 namespace {
4214 const pass_data pass_data_strip_predict_hints =
4216 GIMPLE_PASS, /* type */
4217 "*strip_predict_hints", /* name */
4218 OPTGROUP_NONE, /* optinfo_flags */
4219 TV_BRANCH_PROB, /* tv_id */
4220 PROP_cfg, /* properties_required */
4221 0, /* properties_provided */
4222 0, /* properties_destroyed */
4223 0, /* todo_flags_start */
4224 0, /* todo_flags_finish */
4227 class pass_strip_predict_hints : public gimple_opt_pass
4229 public:
4230 pass_strip_predict_hints (gcc::context *ctxt)
4231 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
4234 /* opt_pass methods: */
4235 opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
4236 void set_pass_param (unsigned int n, bool param)
4238 gcc_assert (n == 0);
4239 early_p = param;
4242 virtual unsigned int execute (function *);
4244 private:
4245 bool early_p;
4247 }; // class pass_strip_predict_hints
4249 unsigned int
4250 pass_strip_predict_hints::execute (function *fun)
4252 return strip_predict_hints (fun, early_p);
4255 } // anon namespace
4257 gimple_opt_pass *
4258 make_pass_strip_predict_hints (gcc::context *ctxt)
4260 return new pass_strip_predict_hints (ctxt);
4263 /* Rebuild function frequencies. Passes are in general expected to
4264 maintain the profile by hand; however, in some cases this is not possible:
4265 for example, when inlining several functions with loops, frequencies may run
4266 out of scale and thus need to be recomputed. */
4268 void
4269 rebuild_frequencies (void)
4271 timevar_push (TV_REBUILD_FREQUENCIES);
4273 /* When the max bb count in the function is small, there is a higher
4274 chance that there were truncation errors in the integer scaling
4275 of counts by inlining and other optimizations. This could lead
4276 to incorrect classification of code as being cold when it isn't.
4277 In that case, force the estimation of bb counts/frequencies from the
4278 branch probabilities, rather than computing frequencies from counts,
4279 which may also lead to frequencies incorrectly reduced to 0. There
4280 is less precision in the probabilities, so we only do this for small
4281 max counts. */
4282 cfun->cfg->count_max = profile_count::uninitialized ();
4283 basic_block bb;
4284 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
4285 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
4287 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4289 loop_optimizer_init (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS);
4290 connect_infinite_loops_to_exit ();
4291 estimate_bb_frequencies (true);
4292 remove_fake_exit_edges ();
4293 loop_optimizer_finalize ();
4295 else if (profile_status_for_fn (cfun) == PROFILE_READ)
4296 update_max_bb_count ();
4297 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT
4298 && !flag_guess_branch_prob)
4300 else
4301 gcc_unreachable ();
4302 timevar_pop (TV_REBUILD_FREQUENCIES);
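/* Example: the truncation hazard described above, in two lines of
   integer arithmetic.  Repeated integer scaling of a small count by
   edge probabilities rounds down to 0, after which the block looks
   never-executed even though it is merely infrequent; re-estimating
   from branch probabilities avoids this.  */

#include <cstdio>

int
main ()
{
  long count = 3;
  count = count * 1 / 2;	/* scale by probability 1/2 -> 1 */
  count = count * 1 / 2;	/* scale again -> 0, i.e. "cold" */
  printf ("%ld\n", count);
}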
4305 /* Perform a dry run of the branch prediction pass and report a comparison
4306 of the predicted and the real profile in the dump file. */
4308 void
4309 report_predictor_hitrates (void)
4311 unsigned nb_loops;
4313 loop_optimizer_init (LOOPS_NORMAL);
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4315 flow_loops_dump (dump_file, NULL, 0);
4317 nb_loops = number_of_loops (cfun);
4318 if (nb_loops > 1)
4319 scev_initialize ();
4321 tree_estimate_probability (true);
4323 if (nb_loops > 1)
4324 scev_finalize ();
4326 loop_optimizer_finalize ();
4329 /* Force edge E to be cold.
4330 If IMPOSSIBLE is true, force the edge to have count and probability 0;
4331 otherwise keep a low probability to represent a possible error in the guess.
4332 This is used e.g. when we predict a loop to likely iterate a given
4333 number of times but are not 100% sure.
4335 This function updates the profile locally without attempting to keep global
4336 consistency, which cannot be reached in full generality without a full
4337 profile rebuild from probabilities alone. Doing so is not necessarily a good
4338 idea because frequencies and counts may be more realistic than probabilities.
4340 In some cases (such as for elimination of early exits during full loop
4341 unrolling) the caller can ensure that profile will get consistent
4342 afterwards. */
4344 void
4345 force_edge_cold (edge e, bool impossible)
4347 profile_count count_sum = profile_count::zero ();
4348 profile_probability prob_sum = profile_probability::never ();
4349 edge_iterator ei;
4350 edge e2;
4351 bool uninitialized_exit = false;
4353 /* When branch probability guesses are not known, then do nothing. */
4354 if (!impossible && !e->count ().initialized_p ())
4355 return;
4357 profile_probability goal = (impossible ? profile_probability::never ()
4358 : profile_probability::very_unlikely ());
4360 /* If the edge is already improbable or cold, just return. */
4361 if (e->probability <= goal
4362 && (!impossible || e->count () == profile_count::zero ()))
4363 return;
4364 FOR_EACH_EDGE (e2, ei, e->src->succs)
4365 if (e2 != e)
4367 if (e->flags & EDGE_FAKE)
4368 continue;
4369 if (e2->count ().initialized_p ())
4370 count_sum += e2->count ();
4371 if (e2->probability.initialized_p ())
4372 prob_sum += e2->probability;
4373 else
4374 uninitialized_exit = true;
4377 /* If we are not guessing profiles but have some other edges out,
4378 just assume the control flow goes elsewhere. */
4379 if (uninitialized_exit)
4380 e->probability = goal;
4381 /* If there are other edges out of e->src, redistribute the probability
4382 there. */
4383 else if (prob_sum > profile_probability::never ())
4385 if (!(e->probability < goal))
4386 e->probability = goal;
4388 profile_probability prob_comp = prob_sum / e->probability.invert ();
4390 if (dump_file && (dump_flags & TDF_DETAILS))
4391 fprintf (dump_file, "Making edge %i->%i %s by redistributing "
4392 "probability to other edges.\n",
4393 e->src->index, e->dest->index,
4394 impossible ? "impossible" : "cold");
4395 FOR_EACH_EDGE (e2, ei, e->src->succs)
4396 if (e2 != e)
4398 e2->probability /= prob_comp;
4400 if (current_ir_type () != IR_GIMPLE
4401 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4402 update_br_prob_note (e->src);
4404 /* If all edges out of e->src are unlikely, the basic block itself
4405 is unlikely. */
4406 else
4408 if (prob_sum == profile_probability::never ())
4409 e->probability = profile_probability::always ();
4410 else
4412 if (impossible)
4413 e->probability = profile_probability::never ();
4414 /* If BB has some edges out that are not impossible, we cannot
4415 assume that BB itself is. */
4416 impossible = false;
4418 if (current_ir_type () != IR_GIMPLE
4419 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4420 update_br_prob_note (e->src);
4421 if (e->src->count == profile_count::zero ())
4422 return;
4423 if (count_sum == profile_count::zero () && impossible)
4425 bool found = false;
4426 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
4428 else if (current_ir_type () == IR_GIMPLE)
4429 for (gimple_stmt_iterator gsi = gsi_start_bb (e->src);
4430 !gsi_end_p (gsi); gsi_next (&gsi))
4432 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
4434 found = true;
4435 break;
4438 /* FIXME: Implement RTL path. */
4439 else
4440 found = true;
4441 if (!found)
4443 if (dump_file && (dump_flags & TDF_DETAILS))
4444 fprintf (dump_file,
4445 "Making bb %i impossible and dropping count to 0.\n",
4446 e->src->index);
4447 e->src->count = profile_count::zero ();
4448 FOR_EACH_EDGE (e2, ei, e->src->preds)
4449 force_edge_cold (e2, impossible);
4450 return;
4454 /* If we did not adjust anything, the source basic block has no likely edges
4455 leaving in the other direction. In that case force that bb cold, too.
4456 This is in general a difficult task to do, but handle the special case when
4457 BB has only one predecessor. This is the common case when we are updating
4458 after loop transforms. */
4459 if (!(prob_sum > profile_probability::never ())
4460 && count_sum == profile_count::zero ()
4461 && single_pred_p (e->src) && e->src->count.to_frequency (cfun)
4462 > (impossible ? 0 : 1))
4464 int old_frequency = e->src->count.to_frequency (cfun);
4465 if (dump_file && (dump_flags & TDF_DETAILS))
4466 fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
4467 impossible ? "impossible" : "cold");
4468 int new_frequency = MIN (e->src->count.to_frequency (cfun),
4469 impossible ? 0 : 1);
4470 if (impossible)
4471 e->src->count = profile_count::zero ();
4472 else
4473 e->src->count = e->count ().apply_scale (new_frequency,
4474 old_frequency);
4475 force_edge_cold (single_pred_edge (e->src), impossible);
4477 else if (dump_file && (dump_flags & TDF_DETAILS)
4478 && maybe_hot_bb_p (cfun, e->src))
4479 fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
4480 impossible ? "impossible" : "cold");
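/* Example: the redistribution step of force_edge_cold on concrete
   numbers (a standalone sketch).  Making the 0.7 edge impossible leaves
   the 0.2 and 0.1 edges to absorb its probability while keeping their
   mutual ratio: each is divided by prob_sum / (1 - goal).  */

#include <cstdio>

int
main ()
{
  double goal = 0.0;			/* impossible -> never */
  double probs[] = { 0.7, 0.2, 0.1 };	/* probs[0] is the forced edge */
  double prob_sum = probs[1] + probs[2];

  probs[0] = goal;
  double prob_comp = prob_sum / (1.0 - goal);
  for (int i = 1; i < 3; i++)
    probs[i] /= prob_comp;

  /* Prints 0.00 0.67 0.33 -- the 2:1 ratio survives and the outgoing
     probabilities again sum to 1.  */
  printf ("%.2f %.2f %.2f\n", probs[0], probs[1], probs[2]);
}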
4484 #if CHECKING_P
4486 namespace selftest {
4488 /* Test that the probability values of the predictors defined in
4489 predict.def are within the range [50, 100]. */
4491 struct branch_predictor
4493 const char *name;
4494 int probability;
4497 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE },
4499 static void
4500 test_prediction_value_range ()
4502 branch_predictor predictors[] = {
4503 #include "predict.def"
4504 { NULL, PROB_UNINITIALIZED }
4507 for (unsigned i = 0; predictors[i].name != NULL; i++)
4509 if (predictors[i].probability == PROB_UNINITIALIZED)
4510 continue;
4512 unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE;
4513 ASSERT_TRUE (p >= 50 && p <= 100);
4517 #undef DEF_PREDICTOR
4519 /* Run all of the selftests within this file. */
4521 void
4522 predict_c_tests ()
4524 test_prediction_value_range ();
4527 } // namespace selftest
4528 #endif /* CHECKING_P. */