gcc/predict.c
/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "coverage.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "profile.h"
#include "sreal.h"
#include "cfgloop.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "selftest.h"
#include "cfgrtl.h"
#include "stringpool.h"
#include "attribs.h"
/* Enum with reasons why a predictor is ignored.  */

enum predictor_reason
{
  REASON_NONE,
  REASON_IGNORED,
  REASON_SINGLE_EDGE_DUPLICATE,
  REASON_EDGE_PAIR_DUPLICATE
};

/* String messages for the aforementioned enum.  */

static const char *reason_messages[] = {"", " (ignored)",
    " (single edge duplicate)", " (edge pair duplicate)"};
/* Real constants: 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5 and BB_FREQ_MAX; one for each variable below.  */
static sreal real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block,
			     enum predictor_reason, edge);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction,
				      class loop *in_loop = NULL);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction,
					   class loop *in_loop = NULL);
static bool can_predict_insn_p (const rtx_insn *);
static HOST_WIDE_INT get_predictor_value (br_predictor, HOST_WIDE_INT);
static void determine_unlikely_bbs ();
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate given in percent to our fixed-point representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
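/* For example, with REG_BR_PROB_BASE of 10000, HITRATE (90) yields 9000,
   i.e. a predictor that is expected to be right 90% of the time.  */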

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  if (min_count == -1)
    {
      const int hot_frac = param_hot_bb_count_fraction;
      const gcov_type min_hot_count
	= hot_frac
	  ? profile_info->sum_max / hot_frac
	  : (gcov_type) profile_count::max_count;
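      /* E.g. with the default --param hot-bb-count-fraction of 10000,
	 counts above sum_max / 10000 are considered hot.  */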
      set_hot_bb_threshold (min_hot_count);
      if (dump_file)
	fprintf (dump_file, "Setting hotness threshold to %" PRId64 ".\n",
		 min_hot_count);
    }
  return min_count;
}

/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}

/* Return TRUE if COUNT is considered to be hot in function FUN.  */

bool
maybe_hot_count_p (struct function *fun, profile_count count)
{
  if (!count.initialized_p ())
    return true;
  if (count.ipa () == profile_count::zero ())
    return false;
  if (!count.ipa_p ())
    {
      struct cgraph_node *node = cgraph_node::get (fun->decl);
      if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
	{
	  if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	    return false;
	  if (node->frequency == NODE_FREQUENCY_HOT)
	    return true;
	}
      if (profile_status_for_fn (fun) == PROFILE_ABSENT)
	return true;
      if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
	  && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
	return false;
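      /* A block is considered hot when it is executed more often than
	 once per hot-bb-frequency-fraction (1000 by default) executions
	 of the function.  */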
      if (count.apply_scale (param_hot_bb_frequency_fraction, 1)
	  < ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
	return false;
      return true;
    }
  /* Code executed at most once is not hot.  */
  if (count <= MAX (profile_info ? profile_info->runs : 1, 1))
    return false;
  return (count.to_gcov_type () >= get_hot_bb_threshold ());
}

/* Return true if basic block BB of function FUN can be CPU intensive
   and should thus be optimized for maximum performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  return maybe_hot_count_p (fun, bb->count);
}

/* Return true if edge E can be CPU intensive and should thus be optimized
   for maximum performance.  */

bool
maybe_hot_edge_p (edge e)
{
  return maybe_hot_count_p (cfun, e->count ());
}

/* Return true if COUNT is considered to be never executed in function FUN
   or if function FUN is considered so in the static profile.  */

static bool
probably_never_executed (struct function *fun, profile_count count)
{
  gcc_checking_assert (fun);
  if (count.ipa () == profile_count::zero ())
    return true;
  /* Do not trust adjusted counts.  Otherwise, as a result of inlining, we
     could drop code with a low execution count into the cold section even
     though it actually gets executed.  Such low counts are not safe even
     with a read profile, and moving executed code into the cold section
     of the binary is not desirable.  */
  if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
    {
      const int unlikely_frac = param_unlikely_bb_count_fraction;
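      /* With the default --param unlikely-bb-count-fraction of 20, COUNT
	 is treated as never executed only when it amounts to less than
	 one execution per 20 train runs.  */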
      if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
	return false;
      return true;
    }
  if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}

/* Return true if basic block BB of function FUN is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count);
}

/* Return true if edge E is unlikely executed for obvious reasons.  */

static bool
unlikely_executed_edge_p (edge e)
{
  return (e->count () == profile_count::zero ()
	  || e->probability == profile_probability::never ())
	 || (e->flags & (EDGE_EH | EDGE_FAKE));
}

/* Return true if edge E of function FUN is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  if (unlikely_executed_edge_p (e))
    return true;
  return probably_never_executed (fun, e->count ());
}

/* Return true if function FUN should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return optimize_size;
  cgraph_node *n = cgraph_node::get (fun->decl);
  return n && n->optimize_for_size_p ();
}

/* Return true if function FUN should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return the optimization type that should be used for function FUN.  */

optimization_type
function_optimization_type (struct function *fun)
{
  return (optimize_function_for_speed_p (fun)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}

/* Return TRUE if basic block BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return (optimize_function_for_size_p (cfun)
	  || (bb && !maybe_hot_bb_p (cfun, bb)));
}

/* Return TRUE if basic block BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return the optimization type that should be used for basic block BB.  */

optimization_type
bb_optimization_type (const_basic_block bb)
{
  return (optimize_bb_for_speed_p (bb)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}

/* Return TRUE if edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE if edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE if the instructions currently being expanded should be
   optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE if the instructions currently being expanded should be
   optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}

/* Return TRUE if LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (class loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE if LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (class loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE if nest rooted at LOOP should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (class loop *loop)
{
  class loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
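  /* Walk the subloops of LOOP in depth-first preorder and return as soon
     as any of them should be optimized for speed.  */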
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return TRUE if nest rooted at LOOP should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (class loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}

/* Return true if edge E is likely to be well predictable by branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (!e->probability.initialized_p ())
    return false;
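  /* An edge is well predictable when its probability is within
     predictable-branch-outcome percent (2 by default) of either
     never or always taken.  */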
  if ((e->probability.to_reg_br_prob_base ()
       <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base ()
	  <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}

/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */

void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Structure representing predictions on the tree (GIMPLE) level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true if edge E is already predicted by PREDICTOR with the
   outcome TAKEN.  */

bool
edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken)
{
  struct edge_prediction *i;
  basic_block bb = e->src;
  edge_prediction **preds = bb_predictions->get (bb);
  if (!preds)
    return false;

  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor
	&& i->ep_edge == e
	&& i->ep_probability == probability)
      return true;
  return false;
}

/* Return true when the probability of edge E is (at least roughly)
   reliable.  */

bool
edge_probability_reliable_p (const_edge e)
{
  return e->probability.probably_reliable_p ();
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */

bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return profile_probability::from_reg_br_prob_note
	   (XINT (note, 0)).probably_reliable_p ();
}
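
/* Record that INSN was predicted by PREDICTOR with outcome PROBABILITY
   by attaching a REG_BR_PRED note to it.  */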
static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;
  gcc_assert (probability != PROB_UNINITIALIZED);

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx_insn *last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */

void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && EDGE_COUNT (e->src->succs) > 1
      && flag_guess_branch_prob
      && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Filter edge predictions PREDS by a function FILTER.  DATA are passed
   to the filter function.  */

void
filter_predictions (edge_prediction **preds,
		    bool (*filter) (edge_prediction *, void *), void *data)
{
  if (!bb_predictions)
    return;

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*filter) (*prediction, data))
	    prediction = &((*prediction)->ep_next);
	  else
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	}
    }
}

/* Filter function predicate that returns true for an edge prediction P
   if its edge is equal to DATA.  */

bool
equal_edge_p (edge_prediction *p, void *data)
{
  return p->ep_edge == (edge) data;
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */

void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);
  filter_predictions (preds, equal_edge_p, e);
}

/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */

static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = profile_probability::from_reg_br_prob_note
			 (XINT (note, 0)).invert ().to_reg_br_prob_note ();
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, enum predictor_reason reason = REASON_NONE,
		 edge ep_edge = NULL)
{
  edge e = ep_edge;
  edge_iterator ei;

  if (!file)
    return;

  if (e == NULL)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (! (e->flags & EDGE_FALLTHRU))
	break;

  char edge_info_str[128];
  if (ep_edge)
    sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index,
	     ep_edge->dest->index);
  else
    edge_info_str[0] = '\0';

  fprintf (file, "  %s heuristics%s%s: %.2f%%",
	   predictor_info[predictor].name,
	   edge_info_str, reason_messages[reason],
	   probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count.initialized_p ())
    {
      fprintf (file, "  exec ");
      bb->count.dump (file);
      if (e)
	{
	  fprintf (file, " hit ");
	  e->count ().dump (file);
	  fprintf (file, " (%.1f%%)", e->count ().to_gcov_type () * 100.0
		   / bb->count.to_gcov_type ());
	}
    }

  fprintf (file, "\n");

  /* Print output that can be easily read by the analyze_brprob.py script.
     We are interested only in counts that are read from GCDA files.  */
  if (dump_file && (dump_flags & TDF_DETAILS)
      && bb->count.precise_p ()
      && reason == REASON_NONE)
    {
      gcc_assert (e->count ().precise_p ());
      fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n",
	       predictor_info[predictor].name,
	       bb->count.to_gcov_type (), e->count ().to_gcov_type (),
	       probability * 100.0 / REG_BR_PROB_BASE);
    }
}

/* Return true if STMT is known to be unlikely executed.  */

static bool
unlikely_executed_stmt_p (gimple *stmt)
{
  if (!is_gimple_call (stmt))
    return false;
  /* NORETURN attribute alone is not strong enough: exit() may be quite
     likely executed once during program run.  */
  if (gimple_call_fntype (stmt)
      && lookup_attribute ("cold",
			   TYPE_ATTRIBUTES (gimple_call_fntype (stmt)))
      && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
    return true;
  tree decl = gimple_call_fndecl (stmt);
  if (!decl)
    return false;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl))
      && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
    return true;

  cgraph_node *n = cgraph_node::get (decl);
  if (!n)
    return false;

  availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (!n->analyzed
      || n->decl == current_function_decl)
    return false;
  return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED;
}

/* Return true if BB is unlikely executed.  */

static bool
unlikely_executed_bb_p (basic_block bb)
{
  if (bb->count == profile_count::zero ())
    return true;
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      if (unlikely_executed_stmt_p (gsi_stmt (gsi)))
	return true;
      if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
	return false;
    }
  return false;
}

/* We cannot predict the probabilities of outgoing edges of BB.  Set them
   evenly and hope for the best.  If UNLIKELY_EDGES is not null, the edges
   it contains are given PROB_VERY_UNLIKELY probability and the remaining
   probability is distributed evenly among the other edges.  Similarly for
   LIKELY_EDGES: if we have exactly one likely edge, its probability is
   used and the other edges are predicted as not probable.  */

static void
set_even_probabilities (basic_block bb,
			hash_set<edge> *unlikely_edges = NULL,
			hash_set<edge_prediction *> *likely_edges = NULL)
{
  unsigned nedges = 0, unlikely_count = 0;
  edge e = NULL;
  edge_iterator ei;
  profile_probability all = profile_probability::always ();

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->probability.initialized_p ())
      all -= e->probability;
    else if (!unlikely_executed_edge_p (e))
      {
	nedges++;
	if (unlikely_edges != NULL && unlikely_edges->contains (e))
	  {
	    all -= profile_probability::very_unlikely ();
	    unlikely_count++;
	  }
      }

  /* Make the distribution even if all edges are unlikely.  */
  unsigned likely_count = likely_edges ? likely_edges->elements () : 0;
  if (unlikely_count == nedges)
    {
      unlikely_edges = NULL;
      unlikely_count = 0;
    }

  /* If we have one likely edge, then use its probability and distribute
     the remaining probabilities evenly.  */
  if (likely_count == 1)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  ;
	else if (!unlikely_executed_edge_p (e))
	  {
	    edge_prediction *prediction = *likely_edges->begin ();
	    int p = prediction->ep_probability;
	    profile_probability prob
	      = profile_probability::from_reg_br_prob_base (p);

	    if (prediction->ep_edge == e)
	      e->probability = prob;
	    else if (unlikely_edges != NULL && unlikely_edges->contains (e))
	      e->probability = profile_probability::very_unlikely ();
	    else
	      {
		profile_probability remainder = prob.invert ();
		remainder -= profile_probability::very_unlikely ()
			       .apply_scale (unlikely_count, 1);
		int count = nedges - unlikely_count - 1;
		gcc_assert (count >= 0);

		e->probability = remainder.apply_scale (1, count);
	      }
	  }
	else
	  e->probability = profile_probability::never ();
    }
  else
    {
      /* Make all unlikely edges unlikely and the rest will have even
	 probability.  */
      unsigned scale = nedges - unlikely_count;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  ;
	else if (!unlikely_executed_edge_p (e))
	  {
	    if (unlikely_edges != NULL && unlikely_edges->contains (e))
	      e->probability = profile_probability::very_unlikely ();
	    else
	      e->probability = all.apply_scale (1, scale);
	  }
	else
	  e->probability = profile_probability::never ();
    }
}

/* Add REG_BR_PROB note to JUMP with PROB.  */

void
add_reg_br_prob_note (rtx_insn *jump, profile_probability prob)
{
  gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0));
  add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ());
}

/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor
	    && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));
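
	/* This is the Dempster-Shafer combination of the running estimate
	   with the new prediction:
	     p' = p1 * p2 / (p1 * p2 + (1 - p1) * (1 - p2)),
	   computed in REG_BR_PROB_BASE fixed point with D holding the
	   denominator.  */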
	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (best_predictor != END_PREDICTORS)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb);
  else
    {
      if (!first_match)
	dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
			 bb, !first_match ? REASON_NONE : REASON_IGNORED);
      else
	dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
			 bb, first_match ? REASON_NONE : REASON_IGNORED);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   (!first_match || best_predictor == predictor)
			   ? REASON_NONE : REASON_IGNORED);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      profile_probability p
	= profile_probability::from_reg_br_prob_base (combined_probability);
      add_reg_br_prob_note (insn, p);

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = p;
	  FALLTHRU_EDGE (bb)->probability
	    = BRANCH_EDGE (bb)->probability.invert ();
	}
    }
  else if (!single_succ_p (bb))
    {
      profile_probability prob = profile_probability::from_reg_br_prob_note
				   (XINT (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = prob.invert ();
    }
  else
    single_succ_edge (bb)->probability = profile_probability::always ();
}

/* Edge prediction hash traits.  */

struct predictor_hash : pointer_hash <edge_prediction>
{
  static inline hashval_t hash (const edge_prediction *);
  static inline bool equal (const edge_prediction *, const edge_prediction *);
};

/* Calculate hash value of an edge prediction P based on predictor and
   normalized probability.  */

inline hashval_t
predictor_hash::hash (const edge_prediction *p)
{
  inchash::hash hstate;
  hstate.add_int (p->ep_predictor);

  int prob = p->ep_probability;
  if (prob > REG_BR_PROB_BASE / 2)
    prob = REG_BR_PROB_BASE - prob;

  hstate.add_int (prob);

  return hstate.end ();
}

/* Return true whether edge predictions P1 and P2 use the same predictor and
   have equal (or opposed) probability.  */

inline bool
predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2)
{
  return (p1->ep_predictor == p2->ep_predictor
	  && (p1->ep_probability == p2->ep_probability
	      || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability));
}

struct predictor_hash_traits : predictor_hash,
  typed_noop_remove <edge_prediction *> {};

/* Return true if edge prediction P is not in DATA hash set.  */

static bool
not_removed_prediction_p (edge_prediction *p, void *data)
{
  hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data;
  return !remove->contains (p);
}

/* Prune predictions for a basic block BB.  Currently we do the following
   clean-up steps:

   1) remove duplicate predictions that guess the same probability
      (different from 1/2) for both outgoing edges
   2) remove duplicates of a prediction that attaches the same probability
      to a single edge

  */

static void
prune_predictions_for_bb (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);

  if (preds)
    {
      hash_table <predictor_hash_traits> s (13);
      hash_set <edge_prediction *> remove;

      /* Step 1: identify predictors that should be removed.  */
      for (edge_prediction *pred = *preds; pred; pred = pred->ep_next)
	{
	  edge_prediction *existing = s.find (pred);
	  if (existing)
	    {
	      if (pred->ep_edge == existing->ep_edge
		  && pred->ep_probability == existing->ep_probability)
		{
		  /* Remove a duplicate predictor.  */
		  dump_prediction (dump_file, pred->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge);

		  remove.add (pred);
		}
	      else if (pred->ep_edge != existing->ep_edge
		       && pred->ep_probability == existing->ep_probability
		       && pred->ep_probability != REG_BR_PROB_BASE / 2)
		{
		  /* Remove both predictors as they predict the same
		     for both edges.  */
		  dump_prediction (dump_file, existing->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_EDGE_PAIR_DUPLICATE,
				   existing->ep_edge);
		  dump_prediction (dump_file, pred->ep_predictor,
				   pred->ep_probability, bb,
				   REASON_EDGE_PAIR_DUPLICATE,
				   pred->ep_edge);

		  remove.add (existing);
		  remove.add (pred);
		}
	    }

	  edge_prediction **slot2 = s.find_slot (pred, INSERT);
	  *slot2 = pred;
	}

      /* Step 2: Remove predictors.  */
      filter_predictions (preds, not_removed_prediction_p, &remove);
    }
}

/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.
   If DRY_RUN is set, only produce dumps and do not modify profile.  */

static void
combine_predictions_for_bb (basic_block bb, bool dry_run)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  int nzero = 0;
  int nunknown = 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (!unlikely_executed_edge_p (e))
	{
	  nedges ++;
	  if (first && !second)
	    second = e;
	  if (!first)
	    first = e;
	}
      else if (!e->probability.initialized_p ())
	e->probability = profile_probability::never ();
      if (!e->probability.initialized_p ())
	nunknown++;
      else if (e->probability == profile_probability::never ())
	nzero++;
    }

  /* When there is no successor or only one choice, prediction is easy.

     When we have a basic block with more than 2 successors, the situation
     is more complicated as DS theory cannot be used literally.
     More precisely, let's assume we predicted edge e1 with probability p1,
     thus: m1({b1}) = p1.  As we're going to combine more than 2 edges, we
     need to find probability of e.g. m1({b2}), which we don't know.
     The only approximation is to equally distribute 1-p1 to all edges
     different from b1.

     According to numbers we've got from the SPEC2006 benchmark, there's only
     one interesting reliable predictor (noreturn call), which can be
     handled with a bit easier approach.  */
  if (nedges != 2)
    {
      hash_set<edge> unlikely_edges (4);
      hash_set<edge_prediction *> likely_edges (4);

      /* Identify all edges that have a predicted probability close to very
	 unlikely or very likely.  */
      edge_prediction **preds = bb_predictions->get (bb);
      if (preds)
	for (pred = *preds; pred; pred = pred->ep_next)
	  {
	    if (pred->ep_probability <= PROB_VERY_UNLIKELY
		|| pred->ep_predictor == PRED_COLD_LABEL)
	      unlikely_edges.add (pred->ep_edge);
	    else if (pred->ep_probability >= PROB_VERY_LIKELY
		     || pred->ep_predictor == PRED_BUILTIN_EXPECT
		     || pred->ep_predictor == PRED_HOT_LABEL)
	      likely_edges.add (pred);
	  }

      /* It can happen that an edge is both in likely_edges and unlikely_edges.
	 Clear both sets in that situation.  */
      for (hash_set<edge_prediction *>::iterator it = likely_edges.begin ();
	   it != likely_edges.end (); ++it)
	if (unlikely_edges.contains ((*it)->ep_edge))
	  {
	    likely_edges.empty ();
	    unlikely_edges.empty ();
	    break;
	  }

      if (!dry_run)
	set_even_probabilities (bb, &unlikely_edges, &likely_edges);
      clear_bb_predictions (bb);
      if (dump_file)
	{
	  fprintf (dump_file, "Predictions for bb %i\n", bb->index);
	  if (unlikely_edges.is_empty ())
	    fprintf (dump_file,
		     "%i edges in bb %i predicted to even probabilities\n",
		     nedges, bb->index);
	  else
	    {
	      fprintf (dump_file,
		       "%i edges in bb %i predicted with some unlikely edges\n",
		       nedges, bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (!unlikely_executed_edge_p (e))
		  dump_prediction (dump_file, PRED_COMBINED,
		   e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e);
	    }
	}
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  prune_predictions_for_bb (bb);

  edge_prediction **preds = bb_predictions->get (bb);

  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor
	      && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds;
		   pred2; pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred2->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it!  */
		    if ((probability >= REG_BR_PROB_BASE / 2
			 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2
			    && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (best_predictor != END_PREDICTORS)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb);
  else
    {
      if (!first_match)
	dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
			 !first_match ? REASON_NONE : REASON_IGNORED);
      else
	dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
			 first_match ? REASON_NONE : REASON_IGNORED);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  dump_prediction (dump_file, predictor, probability, bb,
			   (!first_match || best_predictor == predictor)
			   ? REASON_NONE : REASON_IGNORED, pred->ep_edge);
	}
    }
  clear_bb_predictions (bb);

  /* If we have only one successor which is unknown, we can compute missing
     probability.  */
  if (nunknown == 1)
    {
      profile_probability prob = profile_probability::always ();
      edge missing = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->probability.initialized_p ())
	  prob -= e->probability;
	else if (missing == NULL)
	  missing = e;
	else
	  gcc_unreachable ();
      missing->probability = prob;
    }
  /* If nothing is unknown, we have nothing to update.  */
  else if (!nunknown && nzero != (int) EDGE_COUNT (bb->succs))
    ;
  else if (!dry_run)
    {
      first->probability
	= profile_probability::from_reg_br_prob_base (combined_probability);
      second->probability = first->probability.invert ();
    }
}

/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition is satisfied, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (tree_fits_shwi_p (t1))
    value = tree_to_shwi (t1);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (tree_fits_shwi_p (t2))
    value = tree_to_shwi (t2);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}

/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}

/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gcond *stmt, class loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     tree *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  tree step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
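  /* Exactly one of the two operands must be an induction variable with a
     nonzero step; the other side then acts as the loop-invariant bound.  */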
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (tree_fits_shwi_p (iv1.step))
	step = iv1.step;
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (tree_fits_shwi_p (iv0.step))
	step = iv0.step;
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}

/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple *stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}

/* Return true if BB is predicted by one of the loop heuristics.  */

static bool
predicted_by_loop_heuristics_p (basic_block bb)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED
	|| i->ep_predictor == PRED_LOOP_ITERATIONS_MAX
	|| i->ep_predictor == PRED_LOOP_ITERATIONS
	|| i->ep_predictor == PRED_LOOP_EXIT
	|| i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION
	|| i->ep_predictor == PRED_LOOP_EXTRA_EXIT)
      return true;
  return false;
}

/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

  In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (class loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple *stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  tree compare_step_var;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_loop_heuristics_p (bb))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
					    loop, &compare_var,
					    &compare_code,
					    &compare_step_var,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (tree_fits_shwi_p (loop_bound_var)
      && tree_fits_shwi_p (compare_var)
      && tree_fits_shwi_p (compare_base))
    {
      int probability;
      wi::overflow_type overflow;
      bool overall_overflow = false;
      widest_int compare_count, tem;

      /* (loop_bound - base) / compare_step */
      tem = wi::sub (wi::to_widest (loop_bound_var),
		     wi::to_widest (compare_base), SIGNED, &overflow);
      overall_overflow |= overflow;
      widest_int loop_count = wi::div_trunc (tem,
					     wi::to_widest (compare_step_var),
					     SIGNED, &overflow);
      overall_overflow |= overflow;

      if (!wi::neg_p (wi::to_widest (compare_step_var))
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	{
	  /* (loop_bound - compare_bound) / compare_step */
	  tem = wi::sub (wi::to_widest (loop_bound_var),
			 wi::to_widest (compare_var), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      else
	{
	  /* (compare_bound - base) / compare_step */
	  tem = wi::sub (wi::to_widest (compare_var),
			 wi::to_widest (compare_base), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	++compare_count;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	++loop_count;
      if (wi::neg_p (compare_count))
	compare_count = 0;
      if (wi::neg_p (loop_count))
	loop_count = 0;
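      /* The predicted probability is the fraction of iterations in which
	 the compared condition holds: compare_count / loop_count.  */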
      if (loop_count == 0)
	probability = 0;
      else if (wi::cmps (compare_count, loop_count) == 1)
	probability = REG_BR_PROB_BASE;
      else
	{
	  tem = compare_count * REG_BR_PROB_BASE;
	  tem = wi::udiv_trunc (tem, loop_count);
	  probability = tem.to_uhwi ();
	}

      /* FIXME: The branch prediction seems broken. It has only 20% hitrate.  */
      if (!overall_overflow)
	predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);

      return;
    }

  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	     * step < 0 : backedge condition is like (i > bound)
	     * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}

/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
   exits result from short-circuit conditions that will generate an
   if_tmp.  E.g.:

     if (foo() || global > 10)
       break;

   This will be translated into:

   BB3:
     loop header...
   BB4:
     if foo() goto BB6 else goto BB5
   BB5:
     if global > 10 goto BB6 else goto BB7
   BB6:
     goto BB7
   BB7:
     iftmp = (PHI 0(BB5), 1(BB6))
     if iftmp == 1 goto BB8 else goto BB3
   BB8:
     outside of the loop...

   The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
   From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
   exits.  This function takes BB7->BB8 as input, and finds out the extra loop
   exits to predict them using PRED_LOOP_EXTRA_EXIT.  */

static void
predict_extra_loop_exits (edge exit_edge)
{
  unsigned i;
  bool check_value_one;
  gimple *lhs_def_stmt;
  gphi *phi_stmt;
  tree cmp_rhs, cmp_lhs;
  gimple *last;
  gcond *cmp_stmt;

  last = last_stmt (exit_edge->src);
  if (!last)
    return;
  cmp_stmt = dyn_cast <gcond *> (last);
  if (!cmp_stmt)
    return;

  cmp_rhs = gimple_cond_rhs (cmp_stmt);
  cmp_lhs = gimple_cond_lhs (cmp_stmt);
  if (!TREE_CONSTANT (cmp_rhs)
      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
    return;
  if (TREE_CODE (cmp_lhs) != SSA_NAME)
    return;

  /* If check_value_one is true, only the phi_args with value '1' will lead
     to loop exit.  Otherwise, only the phi_args with value '0' will lead to
     loop exit.  */
  check_value_one = (((integer_onep (cmp_rhs))
		    ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
		    ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));

  lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
  if (!lhs_def_stmt)
    return;

  phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
  if (!phi_stmt)
    return;

  for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
    {
      edge e1;
      edge_iterator ei;
      tree val = gimple_phi_arg_def (phi_stmt, i);
      edge e = gimple_phi_arg_edge (phi_stmt, i);

      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
	continue;
      if ((check_value_one ^ integer_onep (val)) == 1)
	continue;
      if (EDGE_COUNT (e->src->succs) != 1)
	{
	  predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
	  continue;
	}

      FOR_EACH_EDGE (e1, ei, e->src->preds)
	predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
    }
}
1892 /* Predict edge probabilities by exploiting loop structure. */
1894 static void
1895 predict_loops (void)
1897 class loop *loop;
1898 basic_block bb;
1899 hash_set <class loop *> with_recursion(10);
1901 FOR_EACH_BB_FN (bb, cfun)
1903 gimple_stmt_iterator gsi;
1904 tree decl;
1906 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1907 if (is_gimple_call (gsi_stmt (gsi))
1908 && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
1909 && recursive_call_p (current_function_decl, decl))
1911 loop = bb->loop_father;
1912 while (loop && !with_recursion.add (loop))
1913 loop = loop_outer (loop);
1917 /* Try to predict out blocks in a loop that are not part of a
1918 natural loop. */
1919 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
1921 basic_block bb, *bbs;
1922 unsigned j, n_exits = 0;
1923 vec<edge> exits;
1924 class tree_niter_desc niter_desc;
1925 edge ex;
1926 class nb_iter_bound *nb_iter;
1927 enum tree_code loop_bound_code = ERROR_MARK;
1928 tree loop_bound_step = NULL;
1929 tree loop_bound_var = NULL;
1930 tree loop_iv_base = NULL;
1931 gcond *stmt = NULL;
1932 bool recursion = with_recursion.contains (loop);
1934 exits = get_loop_exit_edges (loop);
1935 FOR_EACH_VEC_ELT (exits, j, ex)
1936 if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL))
1937 n_exits ++;
1938 if (!n_exits)
1940 exits.release ();
1941 continue;
1944 if (dump_file && (dump_flags & TDF_DETAILS))
1945 fprintf (dump_file, "Predicting loop %i%s with %i exits.\n",
1946 loop->num, recursion ? " (with recursion)":"", n_exits);
1947 if (dump_file && (dump_flags & TDF_DETAILS)
1948 && max_loop_iterations_int (loop) >= 0)
1950 fprintf (dump_file,
1951 "Loop %d iterates at most %i times.\n", loop->num,
1952 (int)max_loop_iterations_int (loop));
1954 if (dump_file && (dump_flags & TDF_DETAILS)
1955 && likely_max_loop_iterations_int (loop) >= 0)
1957 fprintf (dump_file, "Loop %d likely iterates at most %i times.\n",
1958 loop->num, (int)likely_max_loop_iterations_int (loop));
1961 FOR_EACH_VEC_ELT (exits, j, ex)
1963 tree niter = NULL;
1964 HOST_WIDE_INT nitercst;
1965 int max = param_max_predicted_iterations;
1966 int probability;
1967 enum br_predictor predictor;
1968 widest_int nit;
1970 if (unlikely_executed_edge_p (ex)
1971 || (ex->flags & EDGE_ABNORMAL_CALL))
1972 continue;
1973 /* Loop heuristics do not expect exit conditional to be inside
1974 inner loop. We predict from innermost to outermost loop. */
1975 if (predicted_by_loop_heuristics_p (ex->src))
1977 if (dump_file && (dump_flags & TDF_DETAILS))
1978 fprintf (dump_file, "Skipping exit %i->%i because "
1979 "it is already predicted.\n",
1980 ex->src->index, ex->dest->index);
1981 continue;
1983 predict_extra_loop_exits (ex);
1985 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
1986 niter = niter_desc.niter;
1987 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
1988 niter = loop_niter_by_eval (loop, ex);
1989 if (dump_file && (dump_flags & TDF_DETAILS)
1990 && TREE_CODE (niter) == INTEGER_CST)
1992 fprintf (dump_file, "Exit %i->%i %d iterates ",
1993 ex->src->index, ex->dest->index,
1994 loop->num);
1995 print_generic_expr (dump_file, niter, TDF_SLIM);
1996 fprintf (dump_file, " times.\n");
1999 if (TREE_CODE (niter) == INTEGER_CST)
2001 if (tree_fits_uhwi_p (niter)
2002 && max
2003 && compare_tree_int (niter, max - 1) == -1)
2004 nitercst = tree_to_uhwi (niter) + 1;
2005 else
2006 nitercst = max;
2007 predictor = PRED_LOOP_ITERATIONS;
2009 /* If we have just one exit and we can derive some information about
2010 the number of iterations of the loop from the statements inside
2011 the loop, use it to predict this exit. */
2012 else if (n_exits == 1
2013 && estimated_stmt_executions (loop, &nit))
2015 if (wi::gtu_p (nit, max))
2016 nitercst = max;
2017 else
2018 nitercst = nit.to_shwi ();
2019 predictor = PRED_LOOP_ITERATIONS_GUESSED;
2021 /* If we have a likely upper bound, trust it for very small iteration
2022 counts. Such loops would otherwise get mispredicted by the standard
2023 LOOP_EXIT heuristics. */
2024 else if (n_exits == 1
2025 && likely_max_stmt_executions (loop, &nit)
2026 && wi::ltu_p (nit,
2027 RDIV (REG_BR_PROB_BASE,
2028 REG_BR_PROB_BASE
2029 - predictor_info
2030 [recursion
2031 ? PRED_LOOP_EXIT_WITH_RECURSION
2032 : PRED_LOOP_EXIT].hitrate)))
2034 nitercst = nit.to_shwi ();
2035 predictor = PRED_LOOP_ITERATIONS_MAX;
2037 else
2039 if (dump_file && (dump_flags & TDF_DETAILS))
2040 fprintf (dump_file, "Nothing known about exit %i->%i.\n",
2041 ex->src->index, ex->dest->index);
2042 continue;
2045 if (dump_file && (dump_flags & TDF_DETAILS))
2046 fprintf (dump_file, "Recording prediction to %i iterations by %s.\n",
2047 (int)nitercst, predictor_info[predictor].name);
2048 /* If the prediction for the number of iterations is zero, do not
2049 predict the exit edges. */
2050 if (nitercst == 0)
2051 continue;
2053 probability = RDIV (REG_BR_PROB_BASE, nitercst);
2054 predict_edge (ex, predictor, probability);
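/* A worked sketch (assuming the usual REG_BR_PROB_BASE of 10000): if the
   exit is known to trigger after exactly 100 iterations, nitercst is 100
   and probability = RDIV (10000, 100) == 100, i.e. each execution of the
   exit test leaves the loop with probability 1%. */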
2056 exits.release ();
2058 /* Find information about loop bound variables. */
2059 for (nb_iter = loop->bounds; nb_iter;
2060 nb_iter = nb_iter->next)
2061 if (nb_iter->stmt
2062 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
2064 stmt = as_a <gcond *> (nb_iter->stmt);
2065 break;
2067 if (!stmt && last_stmt (loop->header)
2068 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
2069 stmt = as_a <gcond *> (last_stmt (loop->header));
2070 if (stmt)
2071 is_comparison_with_loop_invariant_p (stmt, loop,
2072 &loop_bound_var,
2073 &loop_bound_code,
2074 &loop_bound_step,
2075 &loop_iv_base);
2077 bbs = get_loop_body (loop);
2079 for (j = 0; j < loop->num_nodes; j++)
2081 edge e;
2082 edge_iterator ei;
2084 bb = bbs[j];
2086 /* Bypass loop heuristics on continue statements. These
2087 statements construct loops via "non-loop" constructs
2088 in the source language and are better handled
2089 separately. */
2090 if (predicted_by_p (bb, PRED_CONTINUE))
2092 if (dump_file && (dump_flags & TDF_DETAILS))
2093 fprintf (dump_file, "BB %i predicted by continue.\n",
2094 bb->index);
2095 continue;
2098 /* If we already used more reliable loop exit predictors, do not
2099 bother with PRED_LOOP_EXIT. */
2100 if (!predicted_by_loop_heuristics_p (bb))
2102 /* For a loop with many exits we don't want to predict all exits
2103 with a pretty large probability, because if all exits are
2104 considered in a row, the loop would be predicted to iterate
2105 almost never. The code that divides the probability by the number
2106 of exits is very rough. It should compute the number of exits
2107 taken in each path through the function (not the overall number
2108 of exits, which might be a lot higher for loops with wide switch
2109 statements in them) and compute the n-th root.
2111 We limit the minimal probability to 2% to prevent
2112 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
2113 as this was causing a regression in the perl benchmark containing
2114 such a wide loop. */
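/* As a worked illustration (assuming REG_BR_PROB_BASE == 10000 and, say,
   a 90% hitrate for the chosen predictor): a loop with 8 such exits would
   give (10000 - 9000) / 8 == 125 per exit, which is below HITRATE (2)
   == 200 and is therefore clamped to the 2% minimum. */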
2116 int probability = ((REG_BR_PROB_BASE
2117 - predictor_info
2118 [recursion
2119 ? PRED_LOOP_EXIT_WITH_RECURSION
2120 : PRED_LOOP_EXIT].hitrate)
2121 / n_exits);
2122 if (probability < HITRATE (2))
2123 probability = HITRATE (2);
2124 FOR_EACH_EDGE (e, ei, bb->succs)
2125 if (e->dest->index < NUM_FIXED_BLOCKS
2126 || !flow_bb_inside_loop_p (loop, e->dest))
2128 if (dump_file && (dump_flags & TDF_DETAILS))
2129 fprintf (dump_file,
2130 "Predicting exit %i->%i with prob %i.\n",
2131 e->src->index, e->dest->index, probability);
2132 predict_edge (e,
2133 recursion ? PRED_LOOP_EXIT_WITH_RECURSION
2134 : PRED_LOOP_EXIT, probability);
2137 if (loop_bound_var)
2138 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
2139 loop_bound_code,
2140 tree_to_shwi (loop_bound_step));
2143 /* In the following code
2144 for (loop1)
2145 if (cond)
2146 for (loop2)
2147 body;
2148 guess that cond is unlikely. */
2149 if (loop_outer (loop)->num)
2151 basic_block bb = NULL;
2152 edge preheader_edge = loop_preheader_edge (loop);
2154 if (single_pred_p (preheader_edge->src)
2155 && single_succ_p (preheader_edge->src))
2156 preheader_edge = single_pred_edge (preheader_edge->src);
2158 gimple *stmt = last_stmt (preheader_edge->src);
2159 /* Pattern match the Fortran loop preheader:
2160 _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER);
2161 _17 = (logical(kind=4)) _16;
2162 if (_17 != 0)
2163 goto <bb 11>;
2164 else
2165 goto <bb 13>;
2167 Loop guard branch prediction says nothing about duplicated loop
2168 headers produced by the Fortran frontend, and in this case we want
2169 to predict the paths leading to this preheader. */
2171 if (stmt
2172 && gimple_code (stmt) == GIMPLE_COND
2173 && gimple_cond_code (stmt) == NE_EXPR
2174 && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
2175 && integer_zerop (gimple_cond_rhs (stmt)))
2177 gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
2178 if (gimple_code (call_stmt) == GIMPLE_ASSIGN
2179 && gimple_expr_code (call_stmt) == NOP_EXPR
2180 && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME)
2181 call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt));
2182 if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT)
2183 && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST
2184 && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2))
2185 && tree_to_uhwi (gimple_call_arg (call_stmt, 2))
2186 == PRED_FORTRAN_LOOP_PREHEADER)
2187 bb = preheader_edge->src;
2189 if (!bb)
2191 if (!dominated_by_p (CDI_DOMINATORS,
2192 loop_outer (loop)->latch, loop->header))
2193 predict_paths_leading_to_edge (loop_preheader_edge (loop),
2194 recursion
2195 ? PRED_LOOP_GUARD_WITH_RECURSION
2196 : PRED_LOOP_GUARD,
2197 NOT_TAKEN,
2198 loop_outer (loop));
2200 else
2202 if (!dominated_by_p (CDI_DOMINATORS,
2203 loop_outer (loop)->latch, bb))
2204 predict_paths_leading_to (bb,
2205 recursion
2206 ? PRED_LOOP_GUARD_WITH_RECURSION
2207 : PRED_LOOP_GUARD,
2208 NOT_TAKEN,
2209 loop_outer (loop));
2213 /* Free basic blocks from get_loop_body. */
2214 free (bbs);
2218 /* Attempt to predict probabilities of BB outgoing edges using local
2219 properties. */
2220 static void
2221 bb_estimate_probability_locally (basic_block bb)
2223 rtx_insn *last_insn = BB_END (bb);
2224 rtx cond;
2226 if (! can_predict_insn_p (last_insn))
2227 return;
2228 cond = get_condition (last_insn, NULL, false, false);
2229 if (! cond)
2230 return;
2232 /* Try "pointer heuristic."
2233 A comparison ptr == 0 is predicted as false.
2234 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2235 if (COMPARISON_P (cond)
2236 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
2237 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
2239 if (GET_CODE (cond) == EQ)
2240 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
2241 else if (GET_CODE (cond) == NE)
2242 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
2244 else
2246 /* Try "opcode heuristic."
2247 EQ tests are usually false and NE tests are usually true. Also,
2248 most quantities are positive, so we can make the appropriate guesses
2249 about signed comparisons against zero. */
2250 switch (GET_CODE (cond))
2252 case CONST_INT:
2253 /* Unconditional branch. */
2254 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
2255 cond == const0_rtx ? NOT_TAKEN : TAKEN);
2256 break;
2258 case EQ:
2259 case UNEQ:
2260 /* Floating point comparisons appear to behave in a very
2261 unpredictable way because of the special role of = tests in
2262 FP code. */
2263 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2265 /* Comparisons with 0 are often used for booleans and there is
2266 nothing useful to predict about them. */
2267 else if (XEXP (cond, 1) == const0_rtx
2268 || XEXP (cond, 0) == const0_rtx)
2270 else
2271 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
2272 break;
2274 case NE:
2275 case LTGT:
2276 /* Floating point comparisons appear to behave in a very
2277 unpredictable way because of the special role of = tests in
2278 FP code. */
2279 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2281 /* Comparisons with 0 are often used for booleans and there is
2282 nothing useful to predict about them. */
2283 else if (XEXP (cond, 1) == const0_rtx
2284 || XEXP (cond, 0) == const0_rtx)
2286 else
2287 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
2288 break;
2290 case ORDERED:
2291 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
2292 break;
2294 case UNORDERED:
2295 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
2296 break;
2298 case LE:
2299 case LT:
2300 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2301 || XEXP (cond, 1) == constm1_rtx)
2302 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
2303 break;
2305 case GE:
2306 case GT:
2307 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2308 || XEXP (cond, 1) == constm1_rtx)
2309 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
2310 break;
2312 default:
2313 break;
2317 /* Set edge->probability for each successor edge of BB. */
2318 void
2319 guess_outgoing_edge_probabilities (basic_block bb)
2321 bb_estimate_probability_locally (bb);
2322 combine_predictions_for_insn (BB_END (bb), bb);
2325 static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor,
2326 HOST_WIDE_INT *probability);
2328 /* Helper function for expr_expected_value. */
2330 static tree
2331 expr_expected_value_1 (tree type, tree op0, enum tree_code code,
2332 tree op1, bitmap visited, enum br_predictor *predictor,
2333 HOST_WIDE_INT *probability)
2335 gimple *def;
2337 /* Reset returned probability value. */
2338 *probability = -1;
2339 *predictor = PRED_UNCONDITIONAL;
2341 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
2343 if (TREE_CONSTANT (op0))
2344 return op0;
2346 if (code == IMAGPART_EXPR)
2348 if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME)
2350 def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0));
2351 if (is_gimple_call (def)
2352 && gimple_call_internal_p (def)
2353 && (gimple_call_internal_fn (def)
2354 == IFN_ATOMIC_COMPARE_EXCHANGE))
2356 /* Assume that any given atomic operation has low contention,
2357 and thus the compare-and-swap operation succeeds. */
2358 *predictor = PRED_COMPARE_AND_SWAP;
2359 return build_one_cst (TREE_TYPE (op0));
2364 if (code != SSA_NAME)
2365 return NULL_TREE;
2367 def = SSA_NAME_DEF_STMT (op0);
2369 /* If we were already here, break the infinite cycle. */
2370 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
2371 return NULL;
2373 if (gimple_code (def) == GIMPLE_PHI)
2375 /* All the arguments of the PHI node must have the same expected
2376 constant value. */
2377 int i, n = gimple_phi_num_args (def);
2378 tree val = NULL, new_val;
2380 for (i = 0; i < n; i++)
2382 tree arg = PHI_ARG_DEF (def, i);
2383 enum br_predictor predictor2;
2385 /* If this PHI has itself as an argument, we cannot
2386 determine the expected value of this argument. However,
2387 if we can find an expected constant value for the other
2388 PHI args then we can still be sure that this is
2389 likely a constant. So be optimistic and just
2390 continue with the next argument. */
2391 if (arg == PHI_RESULT (def))
2392 continue;
2394 HOST_WIDE_INT probability2;
2395 new_val = expr_expected_value (arg, visited, &predictor2,
2396 &probability2);
2398 /* It is difficult to combine value predictors. Simply assume
2399 that the later predictor is weaker and take its prediction. */
2400 if (*predictor < predictor2)
2402 *predictor = predictor2;
2403 *probability = probability2;
2405 if (!new_val)
2406 return NULL;
2407 if (!val)
2408 val = new_val;
2409 else if (!operand_equal_p (val, new_val, false))
2410 return NULL;
2412 return val;
2414 if (is_gimple_assign (def))
2416 if (gimple_assign_lhs (def) != op0)
2417 return NULL;
2419 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
2420 gimple_assign_rhs1 (def),
2421 gimple_assign_rhs_code (def),
2422 gimple_assign_rhs2 (def),
2423 visited, predictor, probability);
2426 if (is_gimple_call (def))
2428 tree decl = gimple_call_fndecl (def);
2429 if (!decl)
2431 if (gimple_call_internal_p (def)
2432 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
2434 gcc_assert (gimple_call_num_args (def) == 3);
2435 tree val = gimple_call_arg (def, 0);
2436 if (TREE_CONSTANT (val))
2437 return val;
2438 tree val2 = gimple_call_arg (def, 2);
2439 gcc_assert (TREE_CODE (val2) == INTEGER_CST
2440 && tree_fits_uhwi_p (val2)
2441 && tree_to_uhwi (val2) < END_PREDICTORS);
2442 *predictor = (enum br_predictor) tree_to_uhwi (val2);
2443 if (*predictor == PRED_BUILTIN_EXPECT)
2444 *probability
2445 = HITRATE (param_builtin_expect_probability);
2446 return gimple_call_arg (def, 1);
2448 return NULL;
2451 if (DECL_IS_MALLOC (decl) || DECL_IS_OPERATOR_NEW_P (decl))
2453 if (predictor)
2454 *predictor = PRED_MALLOC_NONNULL;
2455 return boolean_true_node;
2458 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2459 switch (DECL_FUNCTION_CODE (decl))
2461 case BUILT_IN_EXPECT:
2463 tree val;
2464 if (gimple_call_num_args (def) != 2)
2465 return NULL;
2466 val = gimple_call_arg (def, 0);
2467 if (TREE_CONSTANT (val))
2468 return val;
2469 *predictor = PRED_BUILTIN_EXPECT;
2470 *probability
2471 = HITRATE (param_builtin_expect_probability);
2472 return gimple_call_arg (def, 1);
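	      /* For illustration, in user code such as

		   if (__builtin_expect (x == 42, 1))
		     hot_path ();

		 the expected value of the condition is the constant 1, and
		 the prediction fires with HITRATE
		 (param_builtin_expect_probability), which defaults to 90%.
		 (hot_path is a hypothetical example function.)  */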
2474 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2476 tree val;
2477 if (gimple_call_num_args (def) != 3)
2478 return NULL;
2479 val = gimple_call_arg (def, 0);
2480 if (TREE_CONSTANT (val))
2481 return val;
2482 /* Compute final probability as:
2483 probability * REG_BR_PROB_BASE. */
2484 tree prob = gimple_call_arg (def, 2);
2485 tree t = TREE_TYPE (prob);
2486 tree base = build_int_cst (integer_type_node,
2487 REG_BR_PROB_BASE);
2488 base = build_real_from_int_cst (t, base);
2489 tree r = fold_build2_initializer_loc (UNKNOWN_LOCATION,
2490 MULT_EXPR, t, prob, base);
2491 if (TREE_CODE (r) != REAL_CST)
2493 error_at (gimple_location (def),
2494 "probability %qE must be "
2495 "constant floating-point expression", prob);
2496 return NULL;
2498 HOST_WIDE_INT probi
2499 = real_to_integer (TREE_REAL_CST_PTR (r));
2500 if (probi >= 0 && probi <= REG_BR_PROB_BASE)
2502 *predictor = PRED_BUILTIN_EXPECT_WITH_PROBABILITY;
2503 *probability = probi;
2505 else
2506 error_at (gimple_location (def),
2507 "probability %qE is outside "
2508 "the range [0.0, 1.0]", prob);
2510 return gimple_call_arg (def, 1);
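	      /* A worked sketch: for

		   if (__builtin_expect_with_probability (x == 42, 1, 0.9))

		 the third argument folds to 0.9 * REG_BR_PROB_BASE == 9000,
		 which passes the [0, REG_BR_PROB_BASE] range check above and
		 is used directly as the predicted probability.  */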
2513 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
2514 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
2515 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
2516 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
2517 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
2518 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
2519 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
2520 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
2521 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2522 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2523 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2524 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2525 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2526 /* Assume that any given atomic operation has low contention,
2527 and thus the compare-and-swap operation succeeds. */
2528 *predictor = PRED_COMPARE_AND_SWAP;
2529 return boolean_true_node;
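	    /* E.g. an illustrative lock-free increment such as

		 long old = v;
		 while (!__atomic_compare_exchange_n (&v, &old, old + 1, false,
						      __ATOMIC_SEQ_CST,
						      __ATOMIC_SEQ_CST))
		   ;

	       is predicted to succeed on the first attempt.  */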
2530 case BUILT_IN_REALLOC:
2531 if (predictor)
2532 *predictor = PRED_MALLOC_NONNULL;
2533 return boolean_true_node;
2534 default:
2535 break;
2539 return NULL;
2542 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
2544 tree res;
2545 enum br_predictor predictor2;
2546 HOST_WIDE_INT probability2;
2547 op0 = expr_expected_value (op0, visited, predictor, probability);
2548 if (!op0)
2549 return NULL;
2550 op1 = expr_expected_value (op1, visited, &predictor2, &probability2);
2551 if (!op1)
2552 return NULL;
2553 res = fold_build2 (code, type, op0, op1);
2554 if (TREE_CODE (res) == INTEGER_CST
2555 && TREE_CODE (op0) == INTEGER_CST
2556 && TREE_CODE (op1) == INTEGER_CST)
2558 /* Combine binary predictions. */
2559 if (*probability != -1 || probability2 != -1)
2561 HOST_WIDE_INT p1 = get_predictor_value (*predictor, *probability);
2562 HOST_WIDE_INT p2 = get_predictor_value (predictor2, probability2);
2563 *probability = RDIV (p1 * p2, REG_BR_PROB_BASE);
2566 if (*predictor < predictor2)
2567 *predictor = predictor2;
2569 return res;
2571 return NULL;
2573 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
2575 tree res;
2576 op0 = expr_expected_value (op0, visited, predictor, probability);
2577 if (!op0)
2578 return NULL;
2579 res = fold_build1 (code, type, op0);
2580 if (TREE_CONSTANT (res))
2581 return res;
2582 return NULL;
2584 return NULL;
2587 /* Return the constant EXPR will likely have at execution time, or NULL
2588 if unknown. The function is used by the builtin_expect branch predictor,
2589 so the evidence must come from this construct plus possible constant folding.
2591 We may want to implement a more involved value guess (such as value range
2592 propagation based prediction), but such tricks should go into a new
2593 implementation. */
2595 static tree
2596 expr_expected_value (tree expr, bitmap visited,
2597 enum br_predictor *predictor,
2598 HOST_WIDE_INT *probability)
2600 enum tree_code code;
2601 tree op0, op1;
2603 if (TREE_CONSTANT (expr))
2605 *predictor = PRED_UNCONDITIONAL;
2606 *probability = -1;
2607 return expr;
2610 extract_ops_from_tree (expr, &code, &op0, &op1);
2611 return expr_expected_value_1 (TREE_TYPE (expr),
2612 op0, code, op1, visited, predictor,
2613 probability);
2617 /* Return the probability of a PREDICTOR. If the predictor has a variable
2618 probability, return the passed PROBABILITY. */
2620 static HOST_WIDE_INT
2621 get_predictor_value (br_predictor predictor, HOST_WIDE_INT probability)
2623 switch (predictor)
2625 case PRED_BUILTIN_EXPECT:
2626 case PRED_BUILTIN_EXPECT_WITH_PROBABILITY:
2627 gcc_assert (probability != -1);
2628 return probability;
2629 default:
2630 gcc_assert (probability == -1);
2631 return predictor_info[(int) predictor].hitrate;
2635 /* Predict using opcode of the last statement in basic block. */
2636 static void
2637 tree_predict_by_opcode (basic_block bb)
2639 gimple *stmt = last_stmt (bb);
2640 edge then_edge;
2641 tree op0, op1;
2642 tree type;
2643 tree val;
2644 enum tree_code cmp;
2645 edge_iterator ei;
2646 enum br_predictor predictor;
2647 HOST_WIDE_INT probability;
2649 if (!stmt)
2650 return;
2652 if (gswitch *sw = dyn_cast <gswitch *> (stmt))
2654 tree index = gimple_switch_index (sw);
2655 tree val = expr_expected_value (index, auto_bitmap (),
2656 &predictor, &probability);
2657 if (val && TREE_CODE (val) == INTEGER_CST)
2659 edge e = find_taken_edge_switch_expr (sw, val);
2660 if (predictor == PRED_BUILTIN_EXPECT)
2662 int percent = param_builtin_expect_probability;
2663 gcc_assert (percent >= 0 && percent <= 100);
2664 predict_edge (e, PRED_BUILTIN_EXPECT,
2665 HITRATE (percent));
2667 else
2668 predict_edge_def (e, predictor, TAKEN);
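      /* This handles, for illustration, user code such as

	   switch (__builtin_expect (x, 4))
	     {
	     case 4: hot (); break;
	     default: cold (); break;
	     }

	 where the edge leading to "case 4" is the one predicted taken
	 (hot and cold are hypothetical example functions).  */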
2672 if (gimple_code (stmt) != GIMPLE_COND)
2673 return;
2674 FOR_EACH_EDGE (then_edge, ei, bb->succs)
2675 if (then_edge->flags & EDGE_TRUE_VALUE)
2676 break;
2677 op0 = gimple_cond_lhs (stmt);
2678 op1 = gimple_cond_rhs (stmt);
2679 cmp = gimple_cond_code (stmt);
2680 type = TREE_TYPE (op0);
2681 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
2682 &predictor, &probability);
2683 if (val && TREE_CODE (val) == INTEGER_CST)
2685 HOST_WIDE_INT prob = get_predictor_value (predictor, probability);
2686 if (integer_zerop (val))
2687 prob = REG_BR_PROB_BASE - prob;
2688 predict_edge (then_edge, predictor, prob);
2690 /* Try "pointer heuristic."
2691 A comparison ptr == 0 is predicted as false.
2692 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
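  /* E.g. for

       if (p == NULL)
	 handle_error ();

     the then edge is predicted as not taken (handle_error is a
     hypothetical example function).  */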
2693 if (POINTER_TYPE_P (type))
2695 if (cmp == EQ_EXPR)
2696 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
2697 else if (cmp == NE_EXPR)
2698 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
2700 else
2702 /* Try "opcode heuristic."
2703 EQ tests are usually false and NE tests are usually true. Also,
2704 most quantities are positive, so we can make the appropriate guesses
2705 about signed comparisons against zero. */
2706 switch (cmp)
2708 case EQ_EXPR:
2709 case UNEQ_EXPR:
2710 /* Floating point comparisons appear to behave in a very
2711 unpredictable way because of the special role of = tests in
2712 FP code. */
2713 if (FLOAT_TYPE_P (type))
2715 /* Comparisons with 0 are often used for booleans and there is
2716 nothing useful to predict about them. */
2717 else if (integer_zerop (op0) || integer_zerop (op1))
2719 else
2720 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2721 break;
2723 case NE_EXPR:
2724 case LTGT_EXPR:
2725 /* Floating point comparisons appear to behave in a very
2726 unpredictable way because of the special role of = tests in
2727 FP code. */
2728 if (FLOAT_TYPE_P (type))
2730 /* Comparisons with 0 are often used for booleans and there is
2731 nothing useful to predict about them. */
2732 else if (integer_zerop (op0)
2733 || integer_zerop (op1))
2735 else
2736 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2737 break;
2739 case ORDERED_EXPR:
2740 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2741 break;
2743 case UNORDERED_EXPR:
2744 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2745 break;
2747 case LE_EXPR:
2748 case LT_EXPR:
2749 if (integer_zerop (op1)
2750 || integer_onep (op1)
2751 || integer_all_onesp (op1)
2752 || real_zerop (op1)
2753 || real_onep (op1)
2754 || real_minus_onep (op1))
2755 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2756 break;
2758 case GE_EXPR:
2759 case GT_EXPR:
2760 if (integer_zerop (op1)
2761 || integer_onep (op1)
2762 || integer_all_onesp (op1)
2763 || real_zerop (op1)
2764 || real_onep (op1)
2765 || real_minus_onep (op1))
2766 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
2767 break;
2769 default:
2770 break;
2774 /* Returns TRUE if STMT is an exit(0)-like statement. */
2776 static bool
2777 is_exit_with_zero_arg (const gimple *stmt)
2779 /* This is not exit, _exit or _Exit. */
2780 if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT)
2781 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT)
2782 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2))
2783 return false;
2785 /* The argument is an integer zero. */
2786 return integer_zerop (gimple_call_arg (stmt, 0));
2789 /* Try to guess whether the return value means an error code. */
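/* For illustration: in a pointer-valued function "return NULL;" is
   predicted not taken (PRED_NULL_RETURN); "return -1;" is predicted not
   taken (PRED_NEGATIVE_RETURN); "return 42;" is predicted not taken
   (PRED_CONST_RETURN); boolean-like "return 0;" and "return 1;" yield no
   prediction. */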
2791 static enum br_predictor
2792 return_prediction (tree val, enum prediction *prediction)
2794 /* VOID. */
2795 if (!val)
2796 return PRED_NO_PREDICTION;
2797 /* Different heuristics for pointers and scalars. */
2798 if (POINTER_TYPE_P (TREE_TYPE (val)))
2800 /* NULL is usually not returned. */
2801 if (integer_zerop (val))
2803 *prediction = NOT_TAKEN;
2804 return PRED_NULL_RETURN;
2807 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2809 /* Negative return values are often used to indicate
2810 errors. */
2811 if (TREE_CODE (val) == INTEGER_CST
2812 && tree_int_cst_sgn (val) < 0)
2814 *prediction = NOT_TAKEN;
2815 return PRED_NEGATIVE_RETURN;
2817 /* Constant return values seem to be commonly taken.
2818 Zero/one often represent booleans, so exclude them from the
2819 heuristics. */
2820 if (TREE_CONSTANT (val)
2821 && (!integer_zerop (val) && !integer_onep (val)))
2823 *prediction = NOT_TAKEN;
2824 return PRED_CONST_RETURN;
2827 return PRED_NO_PREDICTION;
2830 /* Return zero if the PHI result could have values other than -1, 0 or 1;
2831 otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1
2832 values are used or likely. */
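/* For example, a PHI whose arguments are the constants -1, 0 and 1 yields
   1 | 2 | 4 == 7. An argument defined by a comparison, or by a cast from
   a 1-bit unsigned value, contributes 2 | 4 since it can only be 0 or 1;
   any other argument makes the result 0. */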
2834 static int
2835 zero_one_minusone (gphi *phi, int limit)
2837 int phi_num_args = gimple_phi_num_args (phi);
2838 int ret = 0;
2839 for (int i = 0; i < phi_num_args; i++)
2841 tree t = PHI_ARG_DEF (phi, i);
2842 if (TREE_CODE (t) != INTEGER_CST)
2843 continue;
2844 wide_int w = wi::to_wide (t);
2845 if (w == -1)
2846 ret |= 1;
2847 else if (w == 0)
2848 ret |= 2;
2849 else if (w == 1)
2850 ret |= 4;
2851 else
2852 return 0;
2854 for (int i = 0; i < phi_num_args; i++)
2856 tree t = PHI_ARG_DEF (phi, i);
2857 if (TREE_CODE (t) == INTEGER_CST)
2858 continue;
2859 if (TREE_CODE (t) != SSA_NAME)
2860 return 0;
2861 gimple *g = SSA_NAME_DEF_STMT (t);
2862 if (gimple_code (g) == GIMPLE_PHI && limit > 0)
2863 if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1))
2865 ret |= r;
2866 continue;
2868 if (!is_gimple_assign (g))
2869 return 0;
2870 if (gimple_assign_cast_p (g))
2872 tree rhs1 = gimple_assign_rhs1 (g);
2873 if (TREE_CODE (rhs1) != SSA_NAME
2874 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2875 || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
2876 || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2877 return 0;
2878 ret |= (2 | 4);
2879 continue;
2881 if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison)
2882 return 0;
2883 ret |= (2 | 4);
2885 return ret;
2888 /* Find the basic block with the return expression and look for a possible
2889 return value, trying to apply the RETURN_PREDICTION heuristics. */
2890 static void
2891 apply_return_prediction (void)
2893 greturn *return_stmt = NULL;
2894 tree return_val;
2895 edge e;
2896 gphi *phi;
2897 int phi_num_args, i;
2898 enum br_predictor pred;
2899 enum prediction direction;
2900 edge_iterator ei;
2902 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2904 gimple *last = last_stmt (e->src);
2905 if (last
2906 && gimple_code (last) == GIMPLE_RETURN)
2908 return_stmt = as_a <greturn *> (last);
2909 break;
2912 if (!e)
2913 return;
2914 return_val = gimple_return_retval (return_stmt);
2915 if (!return_val)
2916 return;
2917 if (TREE_CODE (return_val) != SSA_NAME
2918 || !SSA_NAME_DEF_STMT (return_val)
2919 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
2920 return;
2921 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
2922 phi_num_args = gimple_phi_num_args (phi);
2923 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2925 /* Avoid the case where the function returns -1, 0 and 1 values and
2926 nothing else. Those could be qsort etc. comparison functions
2927 where the negative return isn't less probable than the positive.
2928 For this, require that the function returns at least -1 or 1,
2929 or -1 and a boolean value or comparison result, so that functions
2930 returning just -1 and 0 are treated as if -1 represents an error value. */
2931 if (INTEGRAL_TYPE_P (TREE_TYPE (return_val))
2932 && !TYPE_UNSIGNED (TREE_TYPE (return_val))
2933 && TYPE_PRECISION (TREE_TYPE (return_val)) > 1)
2934 if (int r = zero_one_minusone (phi, 3))
2935 if ((r & (1 | 4)) == (1 | 4))
2936 return;
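  /* For instance, a qsort comparator sketched as

       int cmp (const void *a, const void *b)
       {
	 int x = *(const int *) a, y = *(const int *) b;
	 return x < y ? -1 : x > y;
       }

     returns only -1, 0 and 1, so bits 1 and 4 are both set and no return
     prediction is applied.  */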
2938 /* Avoid the degenerate case where all return values from the function
2939 belong to the same category (i.e. they are all positive constants),
2940 so we can hardly say anything about them. */
2941 for (i = 1; i < phi_num_args; i++)
2942 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2943 break;
2944 if (i != phi_num_args)
2945 for (i = 0; i < phi_num_args; i++)
2947 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2948 if (pred != PRED_NO_PREDICTION)
2949 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2950 direction);
2954 /* Look for basic blocks that contain unlikely-to-happen events
2955 (such as noreturn calls) and mark all paths leading to execution
2956 of these basic blocks as unlikely. */
2958 static void
2959 tree_bb_level_predictions (void)
2961 basic_block bb;
2962 bool has_return_edges = false;
2963 edge e;
2964 edge_iterator ei;
2966 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2967 if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL))
2969 has_return_edges = true;
2970 break;
2973 apply_return_prediction ();
2975 FOR_EACH_BB_FN (bb, cfun)
2977 gimple_stmt_iterator gsi;
2979 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2981 gimple *stmt = gsi_stmt (gsi);
2982 tree decl;
2984 if (is_gimple_call (stmt))
2986 if (gimple_call_noreturn_p (stmt)
2987 && has_return_edges
2988 && !is_exit_with_zero_arg (stmt))
2989 predict_paths_leading_to (bb, PRED_NORETURN,
2990 NOT_TAKEN);
2991 decl = gimple_call_fndecl (stmt);
2992 if (decl
2993 && lookup_attribute ("cold",
2994 DECL_ATTRIBUTES (decl)))
2995 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2996 NOT_TAKEN);
2997 if (decl && recursive_call_p (current_function_decl, decl))
2998 predict_paths_leading_to (bb, PRED_RECURSIVE_CALL,
2999 NOT_TAKEN);
3001 else if (gimple_code (stmt) == GIMPLE_PREDICT)
3003 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
3004 gimple_predict_outcome (stmt));
3005 /* Keep GIMPLE_PREDICT around so early inlining will propagate
3006 hints to callers. */
3012 /* Callback for hash_map::traverse, asserts that the pointer map is
3013 empty. */
3015 bool
3016 assert_is_empty (const_basic_block const &, edge_prediction *const &value,
3017 void *)
3019 gcc_assert (!value);
3020 return false;
3023 /* Predict branch probabilities and estimate profile for basic block BB.
3024 When LOCAL_ONLY is set, do not use any global properties of the CFG. */
3026 static void
3027 tree_estimate_probability_bb (basic_block bb, bool local_only)
3029 edge e;
3030 edge_iterator ei;
3032 FOR_EACH_EDGE (e, ei, bb->succs)
3034 /* Look for a block we are guarding (i.e. we dominate it,
3035 but it doesn't postdominate us). */
3036 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
3037 && !local_only
3038 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
3039 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
3041 gimple_stmt_iterator bi;
3043 /* The call heuristic claims that a guarded function call
3044 is improbable. This is because such calls are often used
3045 to signal exceptional situations such as printing error
3046 messages. */
3047 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
3048 gsi_next (&bi))
3050 gimple *stmt = gsi_stmt (bi);
3051 if (is_gimple_call (stmt)
3052 && !gimple_inexpensive_call_p (as_a <gcall *> (stmt))
3053 /* Const and pure calls are hardly used to signal
3054 something exceptional. */
3055 && gimple_has_side_effects (stmt))
3057 if (gimple_call_fndecl (stmt))
3058 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
3059 else if (virtual_method_call_p (gimple_call_fn (stmt)))
3060 predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN);
3061 else
3062 predict_edge_def (e, PRED_INDIR_CALL, TAKEN);
3063 break;
3068 tree_predict_by_opcode (bb);
3071 /* Predict branch probabilities and estimate profile of the tree CFG.
3072 This function can be called from the loop optimizers to recompute
3073 the profile information.
3074 If DRY_RUN is set, do not modify CFG and only produce dump files. */
3076 void
3077 tree_estimate_probability (bool dry_run)
3079 basic_block bb;
3081 add_noreturn_fake_exit_edges ();
3082 connect_infinite_loops_to_exit ();
3083 /* We use loop_niter_by_eval, which requires that the loops have
3084 preheaders. */
3085 create_preheaders (CP_SIMPLE_PREHEADERS);
3086 calculate_dominance_info (CDI_POST_DOMINATORS);
3087 /* Decide which edges are known to be unlikely. This improves later
3088 branch prediction. */
3089 determine_unlikely_bbs ();
3091 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3092 tree_bb_level_predictions ();
3093 record_loop_exits ();
3095 if (number_of_loops (cfun) > 1)
3096 predict_loops ();
3098 FOR_EACH_BB_FN (bb, cfun)
3099 tree_estimate_probability_bb (bb, false);
3101 FOR_EACH_BB_FN (bb, cfun)
3102 combine_predictions_for_bb (bb, dry_run);
3104 if (flag_checking)
3105 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3107 delete bb_predictions;
3108 bb_predictions = NULL;
3110 if (!dry_run)
3111 estimate_bb_frequencies (false);
3112 free_dominance_info (CDI_POST_DOMINATORS);
3113 remove_fake_exit_edges ();
3116 /* Set edge->probability for each successor edge of BB. */
3117 void
3118 tree_guess_outgoing_edge_probabilities (basic_block bb)
3120 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3121 tree_estimate_probability_bb (bb, true);
3122 combine_predictions_for_bb (bb, false);
3123 if (flag_checking)
3124 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3125 delete bb_predictions;
3126 bb_predictions = NULL;
3129 /* Predict, using PRED, the edges to successors of CUR whose sources are not
3130 postdominated by BB, and recurse to all postdominators. */
3132 static void
3133 predict_paths_for_bb (basic_block cur, basic_block bb,
3134 enum br_predictor pred,
3135 enum prediction taken,
3136 bitmap visited, class loop *in_loop = NULL)
3138 edge e;
3139 edge_iterator ei;
3140 basic_block son;
3142 /* If we exited the loop or CUR is unconditional in the loop, there is
3143 nothing to do. */
3144 if (in_loop
3145 && (!flow_bb_inside_loop_p (in_loop, cur)
3146 || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur)))
3147 return;
3149 /* We are looking for all edges forming an edge cut induced by the
3150 set of all blocks postdominated by BB. */
3151 FOR_EACH_EDGE (e, ei, cur->preds)
3152 if (e->src->index >= NUM_FIXED_BLOCKS
3153 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
3155 edge e2;
3156 edge_iterator ei2;
3157 bool found = false;
3159 /* Ignore fake edges and EH; we predict them as not taken anyway. */
3160 if (unlikely_executed_edge_p (e))
3161 continue;
3162 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
3164 /* See if there is an edge from e->src that is not abnormal
3165 and does not lead to BB and does not exit the loop. */
3166 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3167 if (e2 != e
3168 && !unlikely_executed_edge_p (e2)
3169 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)
3170 && (!in_loop || !loop_exit_edge_p (in_loop, e2)))
3172 found = true;
3173 break;
3176 /* If there is a non-abnormal path leaving e->src, predict the edge
3177 using the predictor. Otherwise we need to look for paths
3178 leading to e->src.
3180 The second case may lead to an infinite loop if we are predicting
3181 regions that are only reachable by abnormal edges. We simply
3182 prevent visiting a given BB twice. */
3183 if (found)
3185 if (!edge_predicted_by_p (e, pred, taken))
3186 predict_edge_def (e, pred, taken);
3188 else if (bitmap_set_bit (visited, e->src->index))
3189 predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop);
3191 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
3192 son;
3193 son = next_dom_son (CDI_POST_DOMINATORS, son))
3194 predict_paths_for_bb (son, bb, pred, taken, visited, in_loop);
3197 /* Set branch probabilities on paths leading to BB according to PRED
3198 and TAKEN. */
3200 static void
3201 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
3202 enum prediction taken, class loop *in_loop)
3204 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3207 /* Like predict_paths_leading_to but take an edge instead of a basic block. */
3209 static void
3210 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
3211 enum prediction taken, class loop *in_loop)
3213 bool has_nonloop_edge = false;
3214 edge_iterator ei;
3215 edge e2;
3217 basic_block bb = e->src;
3218 FOR_EACH_EDGE (e2, ei, bb->succs)
3219 if (e2->dest != e->src && e2->dest != e->dest
3220 && !unlikely_executed_edge_p (e)
3221 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
3223 has_nonloop_edge = true;
3224 break;
3226 if (!has_nonloop_edge)
3228 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3230 else
3231 predict_edge_def (e, pred, taken);
3234 /* This is used to carry information about basic blocks. It is
3235 attached to the AUX field of the standard CFG block. */
3237 class block_info
3239 public:
3240 /* Estimated frequency of execution of basic_block. */
3241 sreal frequency;
3243 /* To keep queue of basic blocks to process. */
3244 basic_block next;
3246 /* Number of predecessors we need to visit first. */
3247 int npredecessors;
3250 /* Similar information for edges. */
3251 class edge_prob_info
3253 public:
3254 /* In case the edge is a loopback edge, the probability that the edge will
3255 be reached provided the header is. The estimated number of iterations of
3256 the loop can then be computed as 1 / (1 - back_edge_prob). */
3257 sreal back_edge_prob;
3258 /* True if the edge is a loopback edge in the natural loop. */
3259 unsigned int back_edge:1;
3262 #define BLOCK_INFO(B) ((block_info *) (B)->aux)
3263 #undef EDGE_INFO
3264 #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
3266 /* Helper function for estimate_bb_frequencies.
3267 Propagate the frequencies in blocks marked in
3268 TOVISIT, starting in HEAD. */
3270 static void
3271 propagate_freq (basic_block head, bitmap tovisit)
3273 basic_block bb;
3274 basic_block last;
3275 unsigned i;
3276 edge e;
3277 basic_block nextbb;
3278 bitmap_iterator bi;
3280 /* For each basic block we need to visit, count the number of its
3281 predecessors that we need to visit first. */
3282 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
3284 edge_iterator ei;
3285 int count = 0;
3287 bb = BASIC_BLOCK_FOR_FN (cfun, i);
3289 FOR_EACH_EDGE (e, ei, bb->preds)
3291 bool visit = bitmap_bit_p (tovisit, e->src->index);
3293 if (visit && !(e->flags & EDGE_DFS_BACK))
3294 count++;
3295 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
3296 fprintf (dump_file,
3297 "Irreducible region hit, ignoring edge to %i->%i\n",
3298 e->src->index, bb->index);
3300 BLOCK_INFO (bb)->npredecessors = count;
3301 /* When the function never returns, we will never process the exit block. */
3302 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3303 bb->count = profile_count::zero ();
3306 BLOCK_INFO (head)->frequency = 1;
3307 last = head;
3308 for (bb = head; bb; bb = nextbb)
3310 edge_iterator ei;
3311 sreal cyclic_probability = 0;
3312 sreal frequency = 0;
3314 nextbb = BLOCK_INFO (bb)->next;
3315 BLOCK_INFO (bb)->next = NULL;
3317 /* Compute frequency of basic block. */
3318 if (bb != head)
3320 if (flag_checking)
3321 FOR_EACH_EDGE (e, ei, bb->preds)
3322 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
3323 || (e->flags & EDGE_DFS_BACK));
3325 FOR_EACH_EDGE (e, ei, bb->preds)
3326 if (EDGE_INFO (e)->back_edge)
3328 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
3330 else if (!(e->flags & EDGE_DFS_BACK))
3332 /* frequency += (e->probability
3333 * BLOCK_INFO (e->src)->frequency /
3334 REG_BR_PROB_BASE); */
3336 /* FIXME: Graphite is producing edges with no profile. Once
3337 this is fixed, drop this. */
3338 sreal tmp = e->probability.initialized_p () ?
3339 e->probability.to_reg_br_prob_base () : 0;
3340 tmp *= BLOCK_INFO (e->src)->frequency;
3341 tmp *= real_inv_br_prob_base;
3342 frequency += tmp;
3345 if (cyclic_probability == 0)
3347 BLOCK_INFO (bb)->frequency = frequency;
3349 else
3351 if (cyclic_probability > real_almost_one)
3352 cyclic_probability = real_almost_one;
3354 /* BLOCK_INFO (bb)->frequency = frequency
3355 / (1 - cyclic_probability) */
3357 cyclic_probability = sreal (1) - cyclic_probability;
3358 BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
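	  /* A worked sketch: if the loop header is entered with frequency 1
	     and its back edge is taken with probability 0.9, the header
	     frequency becomes 1 / (1 - 0.9) == 10, matching the expected
	     ten iterations per entry.  */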
3362 bitmap_clear_bit (tovisit, bb->index);
3364 e = find_edge (bb, head);
3365 if (e)
3367 /* EDGE_INFO (e)->back_edge_prob
3368 = ((e->probability * BLOCK_INFO (bb)->frequency)
3369 / REG_BR_PROB_BASE); */
3371 /* FIXME: Graphite is producing edges with no profile. Once
3372 this is fixed, drop this. */
3373 sreal tmp = e->probability.initialized_p () ?
3374 e->probability.to_reg_br_prob_base () : 0;
3375 tmp *= BLOCK_INFO (bb)->frequency;
3376 EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
3379 /* Propagate to successor blocks. */
3380 FOR_EACH_EDGE (e, ei, bb->succs)
3381 if (!(e->flags & EDGE_DFS_BACK)
3382 && BLOCK_INFO (e->dest)->npredecessors)
3384 BLOCK_INFO (e->dest)->npredecessors--;
3385 if (!BLOCK_INFO (e->dest)->npredecessors)
3387 if (!nextbb)
3388 nextbb = e->dest;
3389 else
3390 BLOCK_INFO (last)->next = e->dest;
3392 last = e->dest;
3398 /* Estimate frequencies in loops at the same nest level. */
3400 static void
3401 estimate_loops_at_level (class loop *first_loop)
3403 class loop *loop;
3405 for (loop = first_loop; loop; loop = loop->next)
3407 edge e;
3408 basic_block *bbs;
3409 unsigned i;
3410 auto_bitmap tovisit;
3412 estimate_loops_at_level (loop->inner);
3414 /* Find current loop back edge and mark it. */
3415 e = loop_latch_edge (loop);
3416 EDGE_INFO (e)->back_edge = 1;
3418 bbs = get_loop_body (loop);
3419 for (i = 0; i < loop->num_nodes; i++)
3420 bitmap_set_bit (tovisit, bbs[i]->index);
3421 free (bbs);
3422 propagate_freq (loop->header, tovisit);
3426 /* Propagate frequencies through the structure of loops. */
3428 static void
3429 estimate_loops (void)
3431 auto_bitmap tovisit;
3432 basic_block bb;
3434 /* Start by estimating the frequencies in the loops. */
3435 if (number_of_loops (cfun) > 1)
3436 estimate_loops_at_level (current_loops->tree_root->inner);
3438 /* Now propagate the frequencies through all the blocks. */
3439 FOR_ALL_BB_FN (bb, cfun)
3441 bitmap_set_bit (tovisit, bb->index);
3443 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
3446 /* Drop the profile for NODE to guessed, and update its frequency based on
3447 whether it is expected to be hot given the CALL_COUNT. */
3449 static void
3450 drop_profile (struct cgraph_node *node, profile_count call_count)
3452 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3453 /* In the case where this was called by another function with a
3454 dropped profile, call_count will be 0. Since there are no
3455 non-zero call counts to this function, we don't know for sure
3456 whether it is hot, and therefore it will be marked normal below. */
3457 bool hot = maybe_hot_count_p (NULL, call_count);
3459 if (dump_file)
3460 fprintf (dump_file,
3461 "Dropping 0 profile for %s. %s based on calls.\n",
3462 node->dump_name (),
3463 hot ? "Function is hot" : "Function is normal");
3464 /* We only expect to miss profiles for functions that are reached
3465 via non-zero call edges in cases where the function may have
3466 been linked from another module or library (COMDATs and extern
3467 templates). See the comments below for handle_missing_profiles.
3468 Also, only warn in cases where the missing counts exceed the
3469 number of training runs. In certain cases with an execv followed
3470 by a no-return call the profile for the no-return call is not
3471 dumped and there can be a mismatch. */
3472 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
3473 && call_count > profile_info->runs)
3475 if (flag_profile_correction)
3477 if (dump_file)
3478 fprintf (dump_file,
3479 "Missing counts for called function %s\n",
3480 node->dump_name ());
3482 else
3483 warning (0, "Missing counts for called function %s",
3484 node->dump_name ());
3487 basic_block bb;
3488 if (opt_for_fn (node->decl, flag_guess_branch_prob))
3490 bool clear_zeros
3491 = !ENTRY_BLOCK_PTR_FOR_FN (fn)->count.nonzero_p ();
3492 FOR_ALL_BB_FN (bb, fn)
3493 if (clear_zeros || !(bb->count == profile_count::zero ()))
3494 bb->count = bb->count.guessed_local ();
3495 fn->cfg->count_max = fn->cfg->count_max.guessed_local ();
3497 else
3499 FOR_ALL_BB_FN (bb, fn)
3500 bb->count = profile_count::uninitialized ();
3501 fn->cfg->count_max = profile_count::uninitialized ();
3504 struct cgraph_edge *e;
3505 for (e = node->callees; e; e = e->next_callee)
3506 e->count = gimple_bb (e->call_stmt)->count;
3507 for (e = node->indirect_calls; e; e = e->next_callee)
3508 e->count = gimple_bb (e->call_stmt)->count;
3509 node->count = ENTRY_BLOCK_PTR_FOR_FN (fn)->count;
3511 profile_status_for_fn (fn)
3512 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
3513 node->frequency
3514 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
3517 /* In the case of COMDAT routines, multiple object files will contain the same
3518 function and the linker will select one for the binary. In that case
3519 all the other copies from the profile-instrumented binary will be missing
3520 profile counts. Look for cases where this happened, due to non-zero
3521 call counts going to 0-count functions, and drop the profile to guessed
3522 so that we can use the estimated probabilities and avoid optimizing only
3523 for size.
3525 The other case where the profile may be missing is when the routine
3526 is not going to be emitted to the object file, e.g. for "extern template"
3527 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
3528 all other cases of non-zero calls to 0-count functions. */
3530 void
3531 handle_missing_profiles (void)
3533 const int unlikely_frac = param_unlikely_bb_count_fraction;
3534 struct cgraph_node *node;
3535 auto_vec<struct cgraph_node *, 64> worklist;
3537 /* See if a 0-count function has non-0-count callers. In this case we
3538 lost some profile. Drop its function profile to PROFILE_GUESSED. */
3539 FOR_EACH_DEFINED_FUNCTION (node)
3541 struct cgraph_edge *e;
3542 profile_count call_count = profile_count::zero ();
3543 gcov_type max_tp_first_run = 0;
3544 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3546 if (node->count.ipa ().nonzero_p ())
3547 continue;
3548 for (e = node->callers; e; e = e->next_caller)
3549 if (e->count.ipa ().initialized_p () && e->count.ipa () > 0)
3551 call_count = call_count + e->count.ipa ();
3553 if (e->caller->tp_first_run > max_tp_first_run)
3554 max_tp_first_run = e->caller->tp_first_run;
3557 /* If the time profile is missing, assign the maximum that comes from
3558 the caller functions. */
3559 if (!node->tp_first_run && max_tp_first_run)
3560 node->tp_first_run = max_tp_first_run + 1;
3562 if (call_count > 0
3563 && fn && fn->cfg
3564 && call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
3566 drop_profile (node, call_count);
3567 worklist.safe_push (node);
3571 /* Propagate the profile dropping to other 0-count COMDATs that are
3572 potentially called by COMDATs we already dropped the profile on. */
3573 while (worklist.length () > 0)
3575 struct cgraph_edge *e;
3577 node = worklist.pop ();
3578 for (e = node->callees; e; e = e->next_caller)
3580 struct cgraph_node *callee = e->callee;
3581 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
3583 if (!(e->count.ipa () == profile_count::zero ())
3584 && callee->count.ipa ().nonzero_p ())
3585 continue;
3586 if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl))
3587 && fn && fn->cfg
3588 && profile_status_for_fn (fn) == PROFILE_READ)
3590 drop_profile (node, profile_count::zero ());
3591 worklist.safe_push (callee);
3597 /* Record the maximal basic block count in CFUN->CFG->COUNT_MAX.
3598 Return nonzero iff there was any nonzero IPA execution count. */
3600 bool
3601 update_max_bb_count (void)
3603 profile_count true_count_max = profile_count::uninitialized ();
3604 basic_block bb;
3606 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3607 true_count_max = true_count_max.max (bb->count);
3609 cfun->cfg->count_max = true_count_max;
3611 return true_count_max.ipa ().nonzero_p ();
3614 /* Return true if the function is likely to be expensive, so there is no
3615 point in optimizing the prologue or epilogue or in inlining at the expense
3616 of code size growth. THRESHOLD is the limit on the number of instructions
3617 the function can execute on average to still be considered not expensive. */
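/* A worked sketch: with THRESHOLD == 20 and an entry block count of 100,
   LIMIT below becomes 2000. The loop then adds BB->COUNT once per active
   insn, so the function is flagged as expensive as soon as the
   profile-weighted number of executed instructions exceeds 2000, i.e.
   more than 20 instructions per invocation on average. */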
3619 bool
3620 expensive_function_p (int threshold)
3622 basic_block bb;
3624 /* If the profile was scaled in such a way that the entry block has count 0,
3625 then the function is definitely taking a lot of time. */
3626 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
3627 return true;
3629 profile_count limit = ENTRY_BLOCK_PTR_FOR_FN
3630 (cfun)->count.apply_scale (threshold, 1);
3631 profile_count sum = profile_count::zero ();
3632 FOR_EACH_BB_FN (bb, cfun)
3634 rtx_insn *insn;
3636 if (!bb->count.initialized_p ())
3638 if (dump_file)
3639 fprintf (dump_file, "Function is considered expensive because"
3640 " count of bb %i is not initialized\n", bb->index);
3641 return true;
3644 FOR_BB_INSNS (bb, insn)
3645 if (active_insn_p (insn))
3647 sum += bb->count;
3648 if (sum > limit)
3649 return true;
3653 return false;
3656 /* All basic blocks that are reachable only from unlikely basic blocks are
3657 unlikely. */
3659 void
3660 propagate_unlikely_bbs_forward (void)
3662 auto_vec<basic_block, 64> worklist;
3663 basic_block bb;
3664 edge_iterator ei;
3665 edge e;
3667 if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()))
3669 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1;
3670 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3672 while (worklist.length () > 0)
3674 bb = worklist.pop ();
3675 FOR_EACH_EDGE (e, ei, bb->succs)
3676 if (!(e->count () == profile_count::zero ())
3677 && !(e->dest->count == profile_count::zero ())
3678 && !e->dest->aux)
3680 e->dest->aux = (void *)(size_t) 1;
3681 worklist.safe_push (e->dest);
3686 FOR_ALL_BB_FN (bb, cfun)
3688 if (!bb->aux)
3690 if (!(bb->count == profile_count::zero ())
3691 && (dump_file && (dump_flags & TDF_DETAILS)))
3692 fprintf (dump_file,
3693 "Basic block %i is marked unlikely by forward prop\n",
3694 bb->index);
3695 bb->count = profile_count::zero ();
3697 else
3698 bb->aux = NULL;
3702 /* Determine basic blocks/edges that are known to be unlikely executed and
3703 set their counters to zero.
3704 This is done by first identifying obviously unlikely BBs/edges and then
3705 propagating in both directions. */
3707 static void
3708 determine_unlikely_bbs ()
3710 basic_block bb;
3711 auto_vec<basic_block, 64> worklist;
3712 edge_iterator ei;
3713 edge e;
3715 FOR_EACH_BB_FN (bb, cfun)
3717 if (!(bb->count == profile_count::zero ())
3718 && unlikely_executed_bb_p (bb))
3720 if (dump_file && (dump_flags & TDF_DETAILS))
3721 fprintf (dump_file, "Basic block %i is locally unlikely\n",
3722 bb->index);
3723 bb->count = profile_count::zero ();
3726 FOR_EACH_EDGE (e, ei, bb->succs)
3727 if (!(e->probability == profile_probability::never ())
3728 && unlikely_executed_edge_p (e))
3730 if (dump_file && (dump_flags & TDF_DETAILS))
3731 fprintf (dump_file, "Edge %i->%i is locally unlikely\n",
3732 bb->index, e->dest->index);
3733 e->probability = profile_probability::never ();
3736 gcc_checking_assert (!bb->aux);
3738 propagate_unlikely_bbs_forward ();
3740 auto_vec<int, 64> nsuccs;
3741 nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun));
3742 FOR_ALL_BB_FN (bb, cfun)
3743 if (!(bb->count == profile_count::zero ())
3744 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3746 nsuccs[bb->index] = 0;
3747 FOR_EACH_EDGE (e, ei, bb->succs)
3748 if (!(e->probability == profile_probability::never ())
3749 && !(e->dest->count == profile_count::zero ()))
3750 nsuccs[bb->index]++;
3751 if (!nsuccs[bb->index])
3752 worklist.safe_push (bb);
3754 while (worklist.length () > 0)
3756 bb = worklist.pop ();
3757 if (bb->count == profile_count::zero ())
3758 continue;
3759 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
3761 bool found = false;
3762 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3763 !gsi_end_p (gsi); gsi_next (&gsi))
3764 if (stmt_can_terminate_bb_p (gsi_stmt (gsi))
3765 /* stmt_can_terminate_bb_p special cases noreturns because it
3766 assumes that fake edges are created. We want to know that
3767 noreturn alone does not imply the BB is unlikely. */
3768 || (is_gimple_call (gsi_stmt (gsi))
3769 && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN)))
3771 found = true;
3772 break;
3774 if (found)
3775 continue;
3777 if (dump_file && (dump_flags & TDF_DETAILS))
3778 fprintf (dump_file,
3779 "Basic block %i is marked unlikely by backward prop\n",
3780 bb->index);
3781 bb->count = profile_count::zero ();
3782 FOR_EACH_EDGE (e, ei, bb->preds)
3783 if (!(e->probability == profile_probability::never ()))
3785 if (!(e->src->count == profile_count::zero ()))
3787 gcc_checking_assert (nsuccs[e->src->index] > 0);
3788 nsuccs[e->src->index]--;
3789 if (!nsuccs[e->src->index])
3790 worklist.safe_push (e->src);
3794 /* Finally, all edges from non-0 regions to 0-count regions are unlikely. */
3795 FOR_ALL_BB_FN (bb, cfun)
3797 if (!(bb->count == profile_count::zero ()))
3798 FOR_EACH_EDGE (e, ei, bb->succs)
3799 if (!(e->probability == profile_probability::never ())
3800 && e->dest->count == profile_count::zero ())
3802 if (dump_file && (dump_flags & TDF_DETAILS))
3803 fprintf (dump_file, "Edge %i->%i is unlikely because "
3804 "it enters unlikely block\n",
3805 bb->index, e->dest->index);
3806 e->probability = profile_probability::never ();
3809 edge other = NULL;
3811 FOR_EACH_EDGE (e, ei, bb->succs)
3812 if (e->probability == profile_probability::never ())
3814 else if (other)
3816 other = NULL;
3817 break;
3819 else
3820 other = e;
3821 if (other
3822 && !(other->probability == profile_probability::always ()))
3824 if (dump_file && (dump_flags & TDF_DETAILS))
3825 fprintf (dump_file, "Edge %i->%i is locally likely\n",
3826 bb->index, other->dest->index);
3827 other->probability = profile_probability::always ();
3830 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
3831 cgraph_node::get (current_function_decl)->count = profile_count::zero ();
3834 /* Estimate and propagate basic block frequencies using the given branch
3835 probabilities. If FORCE is true, the frequencies are used to estimate
3836 the counts even when there are already non-zero profile counts. */
3838 void
3839 estimate_bb_frequencies (bool force)
3841 basic_block bb;
3842 sreal freq_max;
3844 determine_unlikely_bbs ();
3846 if (force || profile_status_for_fn (cfun) != PROFILE_READ
3847 || !update_max_bb_count ())
3849 static int real_values_initialized = 0;
3851 if (!real_values_initialized)
3853 real_values_initialized = 1;
3854 real_br_prob_base = REG_BR_PROB_BASE;
3855 /* Scaling frequencies up to the maximal profile count may result in
3856 frequent overflows, especially when inlining loops.
3857 Small scaling results in unnecessary precision loss. Stay in
3858 half of the (exponential) range. */
3859 real_bb_freq_max = (uint64_t)1 << (profile_count::n_bits / 2);
3860 real_one_half = sreal (1, -1);
3861 real_inv_br_prob_base = sreal (1) / real_br_prob_base;
3862 real_almost_one = sreal (1) - real_inv_br_prob_base;
3865 mark_dfs_back_edges ();
3867 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
3868 profile_probability::always ();
3870 /* Set up block info for each basic block. */
3871 alloc_aux_for_blocks (sizeof (block_info));
3872 alloc_aux_for_edges (sizeof (edge_prob_info));
3873 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3875 edge e;
3876 edge_iterator ei;
3878 FOR_EACH_EDGE (e, ei, bb->succs)
3880 /* FIXME: Graphite is producing edges with no profile. Once
3881 this is fixed, drop this. */
3882 if (e->probability.initialized_p ())
3883 EDGE_INFO (e)->back_edge_prob
3884 = e->probability.to_reg_br_prob_base ();
3885 else
3886 EDGE_INFO (e)->back_edge_prob = REG_BR_PROB_BASE / 2;
3887 EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
3891 /* First compute frequencies locally for each loop from innermost
3892 to outermost to examine frequencies for back edges. */
3893 estimate_loops ();
3895 freq_max = 0;
3896 FOR_EACH_BB_FN (bb, cfun)
3897 if (freq_max < BLOCK_INFO (bb)->frequency)
3898 freq_max = BLOCK_INFO (bb)->frequency;
3900 freq_max = real_bb_freq_max / freq_max;
3901 if (freq_max < 16)
3902 freq_max = 16;
3903 profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ();
3904 cfun->cfg->count_max = profile_count::uninitialized ();
3905 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3907 sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
3908 profile_count count = profile_count::from_gcov_type (tmp.to_int ());
3910 /* If we have profile feedback in which this function was never
3911 executed, then preserve this info. */
3912 if (!(bb->count == profile_count::zero ()))
3913 bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count);
3914 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
3917 free_aux_for_blocks ();
3918 free_aux_for_edges ();
3920 compute_function_frequency ();
3923 /* Decide whether function is hot, cold or unlikely executed. */
3924 void
3925 compute_function_frequency (void)
3927 basic_block bb;
3928 struct cgraph_node *node = cgraph_node::get (current_function_decl);
3930 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
3931 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
3932 node->only_called_at_startup = true;
3933 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
3934 node->only_called_at_exit = true;
3936 if (profile_status_for_fn (cfun) != PROFILE_READ)
3938 int flags = flags_from_decl_or_type (current_function_decl);
3939 if ((ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p ()
3940 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa () == profile_count::zero ())
3941 || lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
3942 != NULL)
3944 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
3945 warn_function_cold (current_function_decl);
3947 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
3948 != NULL)
3949 node->frequency = NODE_FREQUENCY_HOT;
3950 else if (flags & ECF_NORETURN)
3951 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3952 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
3953 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3954 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
3955 || DECL_STATIC_DESTRUCTOR (current_function_decl))
3956 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3957 return;
3960 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
3961 warn_function_cold (current_function_decl);
3962 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa () == profile_count::zero ())
3963 return;
3964 FOR_EACH_BB_FN (bb, cfun)
3966 if (maybe_hot_bb_p (cfun, bb))
3968 node->frequency = NODE_FREQUENCY_HOT;
3969 return;
3971 if (!probably_never_executed_bb_p (cfun, bb))
3972 node->frequency = NODE_FREQUENCY_NORMAL;
3976 /* Build PREDICT_EXPR. */
3977 tree
3978 build_predict_expr (enum br_predictor predictor, enum prediction taken)
3980 tree t = build1 (PREDICT_EXPR, void_type_node,
3981 build_int_cst (integer_type_node, predictor));
3982 SET_PREDICT_EXPR_OUTCOME (t, taken);
3983 return t;
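/* A hedged usage sketch (not a call site in this file): a front end
   lowering an "unlikely" annotation might emit

     tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, NOT_TAKEN);

   and append HINT to the current statement list; the predictor value
   chosen here is only illustrative.  */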
3986 const char *
3987 predictor_name (enum br_predictor predictor)
3989 return predictor_info[predictor].name;
3992 /* Predict branch probabilities and estimate profile of the tree CFG. */
3994 namespace {
3996 const pass_data pass_data_profile =
3998 GIMPLE_PASS, /* type */
3999 "profile_estimate", /* name */
4000 OPTGROUP_NONE, /* optinfo_flags */
4001 TV_BRANCH_PROB, /* tv_id */
4002 PROP_cfg, /* properties_required */
4003 0, /* properties_provided */
4004 0, /* properties_destroyed */
4005 0, /* todo_flags_start */
4006 0, /* todo_flags_finish */
4009 class pass_profile : public gimple_opt_pass
4011 public:
4012 pass_profile (gcc::context *ctxt)
4013 : gimple_opt_pass (pass_data_profile, ctxt)
4016 /* opt_pass methods: */
4017 virtual bool gate (function *) { return flag_guess_branch_prob; }
4018 virtual unsigned int execute (function *);
4020 }; // class pass_profile
4022 unsigned int
4023 pass_profile::execute (function *fun)
4025 unsigned nb_loops;
4027 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4028 return 0;
4030 loop_optimizer_init (LOOPS_NORMAL);
4031 if (dump_file && (dump_flags & TDF_DETAILS))
4032 flow_loops_dump (dump_file, NULL, 0);
4034 mark_irreducible_loops ();
4036 nb_loops = number_of_loops (fun);
4037 if (nb_loops > 1)
4038 scev_initialize ();
4040 tree_estimate_probability (false);
4042 if (nb_loops > 1)
4043 scev_finalize ();
4045 loop_optimizer_finalize ();
4046 if (dump_file && (dump_flags & TDF_DETAILS))
4047 gimple_dump_cfg (dump_file, dump_flags);
4048 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
4049 profile_status_for_fn (fun) = PROFILE_GUESSED;
4050 if (dump_file && (dump_flags & TDF_DETAILS))
4052 class loop *loop;
4053 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
4054 if (loop->header->count.initialized_p ())
4055 fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
4056 loop->num,
4057 (int)expected_loop_iterations_unbounded (loop));
4059 return 0;
4062 } // anon namespace
4064 gimple_opt_pass *
4065 make_pass_profile (gcc::context *ctxt)
4067 return new pass_profile (ctxt);
4070 /* Return true when predictor PRED should be removed after early
4071 tree passes.  Most of the predictors are beneficial to keep, since
4072 early inlining can also distribute them into callers' bodies. */
4074 static bool
4075 strip_predictor_early (enum br_predictor pred)
4077 switch (pred)
4079 case PRED_TREE_EARLY_RETURN:
4080 return true;
4081 default:
4082 return false;
4086 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
4087 we no longer need. EARLY is set to true when called from early
4088 optimizations. */
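/* As an illustration of the input this function consumes: a source
   level hint such as

     if (__builtin_expect (x > 0, 1)) ...

   arrives here as a two-argument call to BUILT_IN_EXPECT.  Once the
   prediction has been consumed, the code below replaces such a call
   with a plain copy of its first argument, or removes it entirely when
   the result is unused.  */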
4090 unsigned int
4091 strip_predict_hints (function *fun, bool early)
4093 basic_block bb;
4094 gimple *ass_stmt;
4095 tree var;
4096 bool changed = false;
4098 FOR_EACH_BB_FN (bb, fun)
4100 gimple_stmt_iterator bi;
4101 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
4103 gimple *stmt = gsi_stmt (bi);
4105 if (gimple_code (stmt) == GIMPLE_PREDICT)
4107 if (!early
4108 || strip_predictor_early (gimple_predict_predictor (stmt)))
4110 gsi_remove (&bi, true);
4111 changed = true;
4112 continue;
4115 else if (is_gimple_call (stmt))
4117 tree fndecl = gimple_call_fndecl (stmt);
4119 if (!early
4120 && ((fndecl != NULL_TREE
4121 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
4122 && gimple_call_num_args (stmt) == 2)
4123 || (fndecl != NULL_TREE
4124 && fndecl_built_in_p (fndecl,
4125 BUILT_IN_EXPECT_WITH_PROBABILITY)
4126 && gimple_call_num_args (stmt) == 3)
4127 || (gimple_call_internal_p (stmt)
4128 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)))
4130 var = gimple_call_lhs (stmt);
4131 changed = true;
4132 if (var)
4134 ass_stmt
4135 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
4136 gsi_replace (&bi, ass_stmt, true);
4138 else
4140 gsi_remove (&bi, true);
4141 continue;
4145 gsi_next (&bi);
4148 return changed ? TODO_cleanup_cfg : 0;
4151 namespace {
4153 const pass_data pass_data_strip_predict_hints =
4155 GIMPLE_PASS, /* type */
4156 "*strip_predict_hints", /* name */
4157 OPTGROUP_NONE, /* optinfo_flags */
4158 TV_BRANCH_PROB, /* tv_id */
4159 PROP_cfg, /* properties_required */
4160 0, /* properties_provided */
4161 0, /* properties_destroyed */
4162 0, /* todo_flags_start */
4163 0, /* todo_flags_finish */
4166 class pass_strip_predict_hints : public gimple_opt_pass
4168 public:
4169 pass_strip_predict_hints (gcc::context *ctxt)
4170 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
4173 /* opt_pass methods: */
4174 opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
4175 void set_pass_param (unsigned int n, bool param)
4177 gcc_assert (n == 0);
4178 early_p = param;
4181 virtual unsigned int execute (function *);
4183 private:
4184 bool early_p;
4186 }; // class pass_strip_predict_hints
4188 unsigned int
4189 pass_strip_predict_hints::execute (function *fun)
4191 return strip_predict_hints (fun, early_p);
4194 } // anon namespace
4196 gimple_opt_pass *
4197 make_pass_strip_predict_hints (gcc::context *ctxt)
4199 return new pass_strip_predict_hints (ctxt);
4202 /* Rebuild function frequencies.  Passes are in general expected to
4203 maintain the profile by hand; however, in some cases this is not
4204 possible: for example, when inlining several functions with loops,
4205 frequencies might run out of scale and thus need to be recomputed. */
4207 void
4208 rebuild_frequencies (void)
4210 timevar_push (TV_REBUILD_FREQUENCIES);
4212 /* When the max bb count in the function is small, there is a higher
4213 chance that there were truncation errors in the integer scaling
4214 of counts by inlining and other optimizations. This could lead
4215 to incorrect classification of code as being cold when it isn't.
4216 In that case, force the estimation of bb counts/frequencies from the
4217 branch probabilities, rather than computing frequencies from counts,
4218 which may also lead to frequencies incorrectly reduced to 0. There
4219 is less precision in the probabilities, so we only do this for small
4220 max counts. */
4221 cfun->cfg->count_max = profile_count::uninitialized ();
4222 basic_block bb;
4223 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
4224 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
4226 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4228 loop_optimizer_init (0);
4229 add_noreturn_fake_exit_edges ();
4230 mark_irreducible_loops ();
4231 connect_infinite_loops_to_exit ();
4232 estimate_bb_frequencies (true);
4233 remove_fake_exit_edges ();
4234 loop_optimizer_finalize ();
4236 else if (profile_status_for_fn (cfun) == PROFILE_READ)
4237 update_max_bb_count ();
4238 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT
4239 && !flag_guess_branch_prob)
4240 ;
4241 else
4242 gcc_unreachable ();
4243 timevar_pop (TV_REBUILD_FREQUENCIES);
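/* A minimal usage sketch (hypothetical caller): a pass whose count
   updates may have run out of scale, e.g. after inlining many loop
   nests, can simply call

     rebuild_frequencies ();

   with no other preparation; the function itself decides between
   rescaling a read profile and re-estimating a guessed one.  */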
4246 /* Perform a dry run of the branch prediction pass and report a comparison
4247 of the predicted and real profiles into the dump file. */
4249 void
4250 report_predictor_hitrates (void)
4252 unsigned nb_loops;
4254 loop_optimizer_init (LOOPS_NORMAL);
4255 if (dump_file && (dump_flags & TDF_DETAILS))
4256 flow_loops_dump (dump_file, NULL, 0);
4258 mark_irreducible_loops ();
4260 nb_loops = number_of_loops (cfun);
4261 if (nb_loops > 1)
4262 scev_initialize ();
4264 tree_estimate_probability (true);
4266 if (nb_loops > 1)
4267 scev_finalize ();
4269 loop_optimizer_finalize ();
4272 /* Force edge E to be cold.
4273 If IMPOSSIBLE is true, force the edge to have count and probability 0;
4274 otherwise keep a low probability to represent a possible error in the
4275 guess.  This is used e.g. when we predict that a loop likely iterates a
4276 given number of times but are not 100% sure.
4278 This function locally updates the profile without attempting to keep
4279 global consistency, which cannot be reached in full generality without a
4280 full profile rebuild from probabilities alone.  Doing so is not necessarily
4281 a good idea, because frequencies and counts may be more realistic than
4282 probabilities.
4283 In some cases (such as the elimination of early exits during full loop
4284 unrolling) the caller can ensure that the profile is made consistent
4285 afterwards. */
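/* A hedged example of intended use: a transform that proved an early
   loop exit can no longer be taken could call

     force_edge_cold (exit_edge, /*impossible=*/true);

   while a heuristic that merely believes the exit is rare would pass
   false so that a small nonzero probability remains.  */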
4287 void
4288 force_edge_cold (edge e, bool impossible)
4290 profile_count count_sum = profile_count::zero ();
4291 profile_probability prob_sum = profile_probability::never ();
4292 edge_iterator ei;
4293 edge e2;
4294 bool uninitialized_exit = false;
4296 /* When branch probability guesses are not known, do nothing. */
4297 if (!impossible && !e->count ().initialized_p ())
4298 return;
4300 profile_probability goal = (impossible ? profile_probability::never ()
4301 : profile_probability::very_unlikely ());
4303 /* If the edge is already improbable or cold, just return. */
4304 if (e->probability <= goal
4305 && (!impossible || e->count () == profile_count::zero ()))
4306 return;
4307 FOR_EACH_EDGE (e2, ei, e->src->succs)
4308 if (e2 != e)
4310 if (e2->flags & EDGE_FAKE)
4311 continue;
4312 if (e2->count ().initialized_p ())
4313 count_sum += e2->count ();
4314 if (e2->probability.initialized_p ())
4315 prob_sum += e2->probability;
4316 else
4317 uninitialized_exit = true;
4320 /* If some other outgoing edge has an uninitialized probability (we are
4321 not guessing the profile), just assume the control flow goes elsewhere. */
4322 if (uninitialized_exit)
4323 e->probability = goal;
4324 /* If there are other edges out of e->src, redistribute the probability
4325 there. */
4326 else if (prob_sum > profile_probability::never ())
4328 if (!(e->probability < goal))
4329 e->probability = goal;
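/* The remaining probability mass must sum to 1 - E->probability, so
   each other edge is divided below by PROB_COMP
   = prob_sum / (1 - E->probability).  For example, with two 50% edges
   and a GOAL of 2%, the other edge becomes
   0.5 * (1 - 0.02) / 0.5 = 98%.  */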
4331 profile_probability prob_comp = prob_sum / e->probability.invert ();
4333 if (dump_file && (dump_flags & TDF_DETAILS))
4334 fprintf (dump_file, "Making edge %i->%i %s by redistributing "
4335 "probability to other edges.\n",
4336 e->src->index, e->dest->index,
4337 impossible ? "impossible" : "cold");
4338 FOR_EACH_EDGE (e2, ei, e->src->succs)
4339 if (e2 != e)
4341 e2->probability /= prob_comp;
4343 if (current_ir_type () != IR_GIMPLE
4344 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4345 update_br_prob_note (e->src);
4347 /* If all edges out of e->src are unlikely, the basic block itself
4348 is unlikely. */
4349 else
4351 if (prob_sum == profile_probability::never ())
4352 e->probability = profile_probability::always ();
4353 else
4355 if (impossible)
4356 e->probability = profile_probability::never ();
4357 /* If BB has some outgoing edges that are not impossible, we cannot
4358 assume that BB itself is impossible. */
4359 impossible = false;
4361 if (current_ir_type () != IR_GIMPLE
4362 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4363 update_br_prob_note (e->src);
4364 if (e->src->count == profile_count::zero ())
4365 return;
4366 if (count_sum == profile_count::zero () && impossible)
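/* Only drop the count of E->src to zero when no statement in the
   block can terminate execution on its own (e.g. a call that may not
   return); otherwise the block may still be entered even though all
   outgoing edges are impossible.  */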
4368 bool found = false;
4369 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
4370 ;
4371 else if (current_ir_type () == IR_GIMPLE)
4372 for (gimple_stmt_iterator gsi = gsi_start_bb (e->src);
4373 !gsi_end_p (gsi); gsi_next (&gsi))
4375 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
4377 found = true;
4378 break;
4381 /* FIXME: Implement RTL path. */
4382 else
4383 found = true;
4384 if (!found)
4386 if (dump_file && (dump_flags & TDF_DETAILS))
4387 fprintf (dump_file,
4388 "Making bb %i impossible and dropping count to 0.\n",
4389 e->src->index);
4390 e->src->count = profile_count::zero ();
4391 FOR_EACH_EDGE (e2, ei, e->src->preds)
4392 force_edge_cold (e2, impossible);
4393 return;
4397 /* If we did not adjust anything, the source basic block has no likely
4398 edges leaving in the other direction.  In that case force that bb to be
4399 cold, too.  This is in general a difficult task to do, but handle the
4400 special case when BB has only one predecessor.  This is the common case
4401 when we are updating the profile after loop transforms. */
4402 if (!(prob_sum > profile_probability::never ())
4403 && count_sum == profile_count::zero ()
4404 && single_pred_p (e->src) && e->src->count.to_frequency (cfun)
4405 > (impossible ? 0 : 1))
4407 int old_frequency = e->src->count.to_frequency (cfun);
4408 if (dump_file && (dump_flags & TDF_DETAILS))
4409 fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
4410 impossible ? "impossible" : "cold");
4411 int new_frequency = MIN (e->src->count.to_frequency (cfun),
4412 impossible ? 0 : 1);
4413 if (impossible)
4414 e->src->count = profile_count::zero ();
4415 else
4416 e->src->count = e->count ().apply_scale (new_frequency,
4417 old_frequency);
4418 force_edge_cold (single_pred_edge (e->src), impossible);
4420 else if (dump_file && (dump_flags & TDF_DETAILS)
4421 && maybe_hot_bb_p (cfun, e->src))
4422 fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
4423 impossible ? "impossible" : "cold");
4427 #if CHECKING_P
4429 namespace selftest {
4431 /* Test that the hitrates of the predictors defined in predict.def are
4432 all within the range [50, 100]. */
4434 struct branch_predictor
4436 const char *name;
4437 int probability;
4440 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE },
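/* Each DEF_PREDICTOR (ENUM, NAME, HITRATE, FLAGS) entry of predict.def
   expands to { NAME, HITRATE } here.  Hitrates are stored relative to
   REG_BR_PROB_BASE, so a predictor declared with a 75% hitrate (via the
   usual HITRATE macro in predict.def) shows up as 75 after the
   conversion in the loop below.  */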
4442 static void
4443 test_prediction_value_range ()
4445 branch_predictor predictors[] = {
4446 #include "predict.def"
4447 { NULL, PROB_UNINITIALIZED }
4450 for (unsigned i = 0; predictors[i].name != NULL; i++)
4452 if (predictors[i].probability == PROB_UNINITIALIZED)
4453 continue;
4455 unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE;
4456 ASSERT_TRUE (p >= 50 && p <= 100);
4460 #undef DEF_PREDICTOR
4462 /* Run all of the selftests within this file. */
4464 void
4465 predict_c_tests ()
4467 test_prediction_value_range ();
4470 } // namespace selftest
4471 #endif /* CHECKING_P. */