/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction);
static bool can_predict_insn_p (const rtx_insn *);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1
/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
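/* A worked example (added illustration, not part of the original
   source): REG_BR_PROB_BASE is 10000, so HITRATE rounds a percentage
   onto the internal fixed-point scale:

     HITRATE (95) == (95 * 10000 + 50) / 100 == 9500
     HITRATE (50) == (50 * 10000 + 50) / 100 == 5000

   This puts the hitrates from predict.def on the same scale as edge
   probabilities.  */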
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
  struct cgraph_node *node = cgraph_node::get (fun->decl);
  if (!profile_info
      || !opt_for_fn (fun->decl, flag_branch_probabilities))
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
    return false;
  if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
    return false;
  if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
	      / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}
static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  gcov_working_set_t *ws;
  if (min_count == -1)
    {
      ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE));
      gcc_assert (ws);
      min_count = ws->min_counter;
    }
  return min_count;
}
/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}
/* Return TRUE if count COUNT is considered to be hot.  */

static inline bool
maybe_hot_count_p (struct function *fun, gcov_type count)
{
  if (fun && profile_status_for_fn (fun) != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count >= get_hot_bb_threshold ());
}
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    return maybe_hot_count_p (fun, bb->count);
  return maybe_hot_frequency_p (fun, bb->frequency);
}
/* Return true in case edge E can be CPU intensive and should be
   optimized for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_READ)
    return maybe_hot_count_p (cfun, e->count);
  return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
}
/* Return true if profile COUNT and FREQUENCY, or function FUN static
   node frequency reflects never being executed.  */

static bool
probably_never_executed (struct function *fun,
			 gcov_type count, int frequency)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    {
      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
      if (count * unlikely_count_fraction >= profile_info->runs)
	return false;
      if (!frequency)
	return true;
      if (!ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
	return false;
      if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
	{
	  gcov_type computed_count;
	  /* Check for possibility of overflow, in which case entry bb count
	     is large enough to do the division first without losing much
	     precision.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count < REG_BR_PROB_BASE *
	      REG_BR_PROB_BASE)
	    {
	      gcov_type scaled_count
		  = frequency * ENTRY_BLOCK_PTR_FOR_FN (fun)->count *
	     unlikely_count_fraction;
	      computed_count = RDIV (scaled_count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	    }
	  else
	    {
	      computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (fun)->count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	      computed_count *= frequency * unlikely_count_fraction;
	    }
	  if (computed_count >= profile_info->runs)
	    return false;
	}
      return true;
    }
  if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count, bb->frequency);
}
/* Return true in case edge E is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}
/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return optimize_size;
  cgraph_node *n = cgraph_node::get (fun->decl);
  return n && n->optimize_for_size_p ();
}
/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}
/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return (optimize_function_for_size_p (cfun)
	  || (bb && !maybe_hot_bb_p (cfun, bb)));
}
/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}
/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}
/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}
/* Return TRUE when the current instruction should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}
/* Return TRUE when the current instruction should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}
/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}
/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      /* Walk subloops of the loop nest, descending into inner loops
	 first and climbing back out when a subtree is exhausted.  */
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}
/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}
/* Return true when edge E is likely to be well predictable by branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
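/* Numeric illustration (added note; assumes the default value 2 of
   the predictable-branch-outcome parameter): an edge counts as
   predictable when its probability is at most 2 * 10000 / 100 == 200
   (2%) or at least 10000 - 200 == 9800 (98%).  */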
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* Structure representing predictions in tree level.  */

struct edge_prediction {
    struct edge_prediction *ep_next;
    edge ep_edge;
    enum br_predictor ep_predictor;
    int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which is predicted right slightly over 75% of the time.
   It is however notoriously poor on predicting the probability itself.
   In general the profile appears a lot flatter (with probabilities closer
   to 50%) than the reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This macro
   should be able to distinguish those, but at the moment it simply checks for
   the noreturn heuristic, which is the only one giving probability over 99% or
   below 1%.  In the future we might want to propagate reliability information
   across the CFG if we find this information useful in multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status_for_fn (cfun) == PROFILE_READ
	  || (profile_status_for_fn (cfun) == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}
/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (XINT (note, 0));
}
static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx_insn *last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status_for_fn (cfun) != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && EDGE_COUNT (e->src->succs) >
       1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec %" PRId64, bb->count);
      if (e)
	{
	  fprintf (file, " hit %" PRId64, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
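/* Added example: for a block with three non-EH, non-fake successors,
   each edge receives (REG_BR_PROB_BASE + 3 / 2) / 3
   == (10000 + 1) / 3 == 3333, keeping the rounding error within one
   unit of the probability base.  */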
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_int_reg_note (insn, REG_BR_PROB, combined_probability);

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = XINT (prob_note, 0);

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
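/* A worked example of the Dempster-Shafer combination above (added
   illustration, not part of the original source): with REG_BR_PROB_BASE
   of 10000, folding a new prediction of 8000 (80%) into a running
   estimate of 9000 (90%) gives

     d = 9000 * 8000 + (10000 - 9000) * (10000 - 8000) == 74000000
     combined = 9000.0 * 8000 * 10000 / d + 0.5 ~= 9730

   i.e. two agreeing predictors reinforce each other to roughly 97.3%.  */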
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges ++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  edge_prediction **preds = bb_predictions->get (bb);
  if (preds)
    {
      for (pred = *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds;
		   pred2; pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred2->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it! */
		    if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != EDGE_SUCC (bb, 0))
	    probability = REG_BR_PROB_BASE - probability;
	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	}
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition is satisfied, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (tree_fits_shwi_p (t1))
    value = tree_to_shwi (t1);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (tree_fits_shwi_p (t2))
    value = tree_to_shwi (t2);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}
/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}
/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     tree *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  tree step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
       || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (tree_fits_shwi_p (iv1.step))
	step = iv1.step;
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (tree_fits_shwi_p (iv0.step))
	step = iv0.step;
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}
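/* Added example: in

     for (i = 0; i < n; i += 2)
       ...

   the exit test "i < n" qualifies: "i" is a simple induction variable
   with constant step 2 while "n" has zero step, so the function returns
   true with *compare_code == LT_EXPR, *loop_step == 2,
   *loop_iv_base == 0 and *loop_invariant derived from "n".  */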
/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}
/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

  In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (struct loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  tree compare_step_var;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
      || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
      || predicted_by_p (bb, PRED_LOOP_EXIT))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
					    loop, &compare_var,
					    &compare_code,
					    &compare_step_var,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (tree_fits_shwi_p (loop_bound_var)
      && tree_fits_shwi_p (compare_var)
      && tree_fits_shwi_p (compare_base))
    {
      int probability;
      bool overflow, overall_overflow = false;
      widest_int compare_count, tem;

      /* (loop_bound - base) / compare_step */
      tem = wi::sub (wi::to_widest (loop_bound_var),
		     wi::to_widest (compare_base), SIGNED, &overflow);
      overall_overflow |= overflow;
      widest_int loop_count = wi::div_trunc (tem,
					     wi::to_widest (compare_step_var),
					     SIGNED, &overflow);
      overall_overflow |= overflow;

      if (!wi::neg_p (wi::to_widest (compare_step_var))
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	{
	  /* (loop_bound - compare_bound) / compare_step */
	  tem = wi::sub (wi::to_widest (loop_bound_var),
			 wi::to_widest (compare_var), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      else
	{
	  /* (compare_bound - base) / compare_step */
	  tem = wi::sub (wi::to_widest (compare_var),
			 wi::to_widest (compare_base), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	++compare_count;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	++loop_count;
      if (wi::neg_p (compare_count))
	compare_count = 0;
      if (wi::neg_p (loop_count))
	loop_count = 0;
      if (loop_count == 0)
	probability = 0;
      else if (wi::cmps (compare_count, loop_count) == 1)
	probability = REG_BR_PROB_BASE;
      else
	{
	  tem = compare_count * REG_BR_PROB_BASE;
	  tem = wi::udiv_trunc (tem, loop_count);
	  probability = tem.to_uhwi ();
	}

      if (!overall_overflow)
	predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);

      return;
    }

  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	     * step < 0 : backedge condition is like (i > bound)
	     * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}
/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
   exits result from short-circuit conditions that will generate an
   if_tmp.  E.g.:

   if (foo() || global > 10)
     break;

   This will be translated into:

   BB3:
     loop header...
   BB4:
     if foo() goto BB6 else goto BB5
   BB5:
     if global > 10 goto BB6 else goto BB7
   BB6:
     goto BB7
   BB7:
     iftmp = (PHI 0(BB5), 1(BB6))
     if iftmp == 1 goto BB8 else goto BB3
   BB8:
     outside of the loop...

   The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
   From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
   exits.  This function takes BB7->BB8 as input, and finds out the extra loop
   exits to predict them using PRED_LOOP_EXIT.  */

static void
predict_extra_loop_exits (edge exit_edge)
{
  unsigned i;
  bool check_value_one;
  gimple lhs_def_stmt;
  gphi *phi_stmt;
  tree cmp_rhs, cmp_lhs;
  gimple last;
  gcond *cmp_stmt;

  last = last_stmt (exit_edge->src);
  if (!last)
    return;
  cmp_stmt = dyn_cast <gcond *> (last);
  if (!cmp_stmt)
    return;

  cmp_rhs = gimple_cond_rhs (cmp_stmt);
  cmp_lhs = gimple_cond_lhs (cmp_stmt);
  if (!TREE_CONSTANT (cmp_rhs)
      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
    return;
  if (TREE_CODE (cmp_lhs) != SSA_NAME)
    return;

  /* If check_value_one is true, only the phi_args with value '1' will lead
     to loop exit.  Otherwise, only the phi_args with value '0' will lead to
     loop exit.  */
  check_value_one = (((integer_onep (cmp_rhs))
		    ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
		    ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));

  lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
  if (!lhs_def_stmt)
    return;

  phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
  if (!phi_stmt)
    return;

  for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
    {
      edge e1;
      edge_iterator ei;
      tree val = gimple_phi_arg_def (phi_stmt, i);
      edge e = gimple_phi_arg_edge (phi_stmt, i);

      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
	continue;
      if ((check_value_one ^ integer_onep (val)) == 1)
	continue;
      if (EDGE_COUNT (e->src->succs) != 1)
	{
	  predict_paths_leading_to_edge (e, PRED_LOOP_EXIT, NOT_TAKEN);
	  continue;
	}

      FOR_EACH_EDGE (e1, ei, e->src->preds)
	predict_paths_leading_to_edge (e1, PRED_LOOP_EXIT, NOT_TAKEN);
    }
}
/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  struct loop *loop;

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      vec<edge> exits;
      struct tree_niter_desc niter_desc;
      edge ex;
      struct nb_iter_bound *nb_iter;
      enum tree_code loop_bound_code = ERROR_MARK;
      tree loop_bound_step = NULL;
      tree loop_bound_var = NULL;
      tree loop_iv_base = NULL;
      gcond *stmt = NULL;

      exits = get_loop_exit_edges (loop);
      n_exits = exits.length ();
      if (!n_exits)
	{
	  exits.release ();
	  continue;
	}

      FOR_EACH_VEC_ELT (exits, j, ex)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  predict_extra_loop_exits (ex);

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (tree_fits_uhwi_p (niter)
		  && max
		  && compare_tree_int (niter, max - 1) == -1)
		nitercst = tree_to_uhwi (niter) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_stmt_executions_int (loop);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

	  /* If the prediction for number of iterations is zero, do not
	     predict the exit edges.  */
	  if (nitercst == 0)
	    continue;

	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
	}
      exits.release ();

      /* Find information about loop bound variables.  */
      for (nb_iter = loop->bounds; nb_iter;
	   nb_iter = nb_iter->next)
	if (nb_iter->stmt
	    && gimple_code (nb_iter->stmt) == GIMPLE_COND)
	  {
	    stmt = as_a <gcond *> (nb_iter->stmt);
	    break;
	  }
      if (!stmt && last_stmt (loop->header)
	  && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
	stmt = as_a <gcond *> (last_stmt (loop->header));
      if (stmt)
	is_comparison_with_loop_invariant_p (stmt, loop,
					     &loop_bound_var,
					     &loop_bound_code,
					     &loop_bound_step,
					     &loop_iv_base);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For loop with many exits we don't want to predict all exits
		 with the pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by number of
		 exits is very rough.  It should compute the number of exits
		 taken in each path through the function (not the overall number
		 of exits that might be a lot higher for loops with wide switch
		 statements in them) and compute n-th square root.

		 We limit the minimal probability by 2% to avoid
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
		 as this was causing regression in perl benchmark containing such
		 a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	  if (loop_bound_var)
	    predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
				   loop_bound_code,
				   tree_to_shwi (loop_bound_step));
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx_insn *last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor);
/* Helper function for expr_expected_value.  */

static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
		       tree op1, bitmap visited, enum br_predictor *predictor)
{
  gimple def;

  if (predictor)
    *predictor = PRED_UNCONDITIONAL;

  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CONSTANT (op0))
	return op0;

      if (code != SSA_NAME)
	return NULL_TREE;

      def = SSA_NAME_DEF_STMT (op0);

      /* If we were already here, break the infinite cycle.  */
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
	return NULL;

      if (gimple_code (def) == GIMPLE_PHI)
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  int i, n = gimple_phi_num_args (def);
	  tree val = NULL, new_val;

	  for (i = 0; i < n; i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);
	      enum br_predictor predictor2;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited, &predictor2);

	      /* It is difficult to combine value predictors.  Simply assume
		 that later predictor is weaker and take its prediction.  */
	      if (predictor && *predictor < predictor2)
		*predictor = predictor2;
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (is_gimple_assign (def))
	{
	  if (gimple_assign_lhs (def) != op0)
	    return NULL;

	  return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
					gimple_assign_rhs1 (def),
					gimple_assign_rhs_code (def),
					gimple_assign_rhs2 (def),
					visited, predictor);
	}

      if (is_gimple_call (def))
	{
	  tree decl = gimple_call_fndecl (def);
	  if (!decl)
	    {
	      if (gimple_call_internal_p (def)
		  && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
		{
		  gcc_assert (gimple_call_num_args (def) == 3);
		  tree val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  if (predictor)
		    {
		      tree val2 = gimple_call_arg (def, 2);
		      gcc_assert (TREE_CODE (val2) == INTEGER_CST
				  && tree_fits_uhwi_p (val2)
				  && tree_to_uhwi (val2) < END_PREDICTORS);
		      *predictor = (enum br_predictor) tree_to_uhwi (val2);
		    }
		  return gimple_call_arg (def, 1);
		}
	      return NULL;
	    }
	  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_EXPECT:
		{
		  tree val;
		  if (gimple_call_num_args (def) != 2)
		    return NULL;
		  val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  if (predictor)
		    *predictor = PRED_BUILTIN_EXPECT;
		  return gimple_call_arg (def, 1);
		}

	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
		/* Assume that any given atomic operation has low contention,
		   and thus the compare-and-swap operation succeeds.  */
		if (predictor)
		  *predictor = PRED_COMPARE_AND_SWAP;
		return boolean_true_node;

	      default:
		break;
	      }
	}

      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    {
      tree res;
      enum br_predictor predictor2;
      op0 = expr_expected_value (op0, visited, predictor);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (op1, visited, &predictor2);
      if (predictor && *predictor < predictor2)
	*predictor = predictor2;
      if (!op1)
	return NULL;
      res = fold_build2 (code, type, op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited, predictor);
      if (!op0)
	return NULL;
      res = fold_build1 (code, type, op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
/* Return the constant EXPR will likely have at execution time, NULL if
   unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guessing (such as value
   range propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited,
		     enum br_predictor *predictor)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    {
      if (predictor)
	*predictor = PRED_UNCONDITIONAL;
      return expr;
    }

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
				op0, code, op1, visited, predictor);
}
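/* Added illustration: user code such as

     if (__builtin_expect (x != 0, 1))
       ...

   reaches this function through the gimplified call; the call's second
   argument (the expected value, here 1) is returned and *predictor is
   set to PRED_BUILTIN_EXPECT, letting tree_predict_by_opcode below
   predict the branch as taken.  */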
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  edge then_edge;
  tree op0, op1;
  tree type;
  tree val;
  enum tree_code cmp;
  bitmap visited;
  edge_iterator ei;
  enum br_predictor predictor;

  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  cmp = gimple_cond_code (stmt);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
			       &predictor);
  BITMAP_FREE (visited);
  if (val && TREE_CODE (val) == INTEGER_CST)
    {
      if (predictor == PRED_BUILTIN_EXPECT)
	{
	  int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY);

	  gcc_assert (percent >= 0 && percent <= 100);
	  if (integer_zerop (val))
	    percent = 100 - percent;
	  predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent));
	}
      else
	predict_edge (then_edge, predictor,
		      integer_zerop (val) ? NOT_TAKEN : TAKEN);
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (cmp == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (cmp == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (cmp)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0) || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_zerop (op1)
	    || real_onep (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_zerop (op1)
	    || real_onep (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
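/* Added example of the opcode heuristics above: for

     if (count > 0)
       ...

   the GT_EXPR case with a zero right-hand side predicts the then edge
   as TAKEN (PRED_TREE_OPCODE_POSITIVE), reflecting the observation that
   most quantities are positive.  */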
/* Try to guess whether the return value means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (void)
{
  greturn *return_stmt = NULL;
  tree return_val;
  edge e;
  gphi *phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      gimple last = last_stmt (e->src);
      if (last
	  && gimple_code (last) == GIMPLE_RETURN)
	{
	  return_stmt = as_a <greturn *> (last);
	  break;
	}
    }
  if (!e)
    return;
  return_val = gimple_return_retval (return_stmt);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
    return;
  phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
  phi_num_args = gimple_phi_num_args (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to same category (ie they are all positive constants)
     so we can hardly say something about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
					 direction);
      }
}
/* Look for basic blocks that contain unlikely to happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  bool has_return_edges = false;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
      {
	has_return_edges = true;
	break;
      }

  apply_return_prediction ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      if ((gimple_call_flags (stmt) & ECF_NORETURN)
		  && has_return_edges)
		predict_paths_leading_to (bb, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	    }
	  else if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
					gimple_predict_outcome (stmt));
	      /* Keep GIMPLE_PREDICT around so early inlining will propagate
		 hints to callers.  */
	    }
	}
    }
}
#ifdef ENABLE_CHECKING

/* Callback for hash_map::traverse, asserts that the pointer map is
   empty.  */

bool
assert_is_empty (const_basic_block const &, edge_prediction *const &value,
		 void *)
{
  gcc_assert (!value);
  return false;
}
#endif
/* Predict branch probabilities and estimate profile for basic block BB.  */

static void
tree_estimate_probability_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple last;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* Predict edges to user labels with attributes.  */
      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  gimple_stmt_iterator gi;
	  for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
	      tree decl;

	      if (!label_stmt)
		break;
	      decl = gimple_label_label (label_stmt);
	      if (DECL_ARTIFICIAL (decl))
		continue;

	      /* Finally, we have a user-defined label.  */
	      if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
		predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
	      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
		predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
	    }
	}

      /* Predict early returns to be probable, as we've already taken
	 care for error returns and other cases are often used for
	 fast paths through function.

	 Since we've already removed the return statements, we are
	 looking for CFG like:

	   if (conditional)
	     {
	       ..
	       goto return_block
	     }
	   some other blocks
	 return_block:
	   return_stmt.  */
      if (e->dest != bb->next_bb
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && single_succ_p (e->dest)
	  && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && (last = last_stmt (e->dest)) != NULL
	  && gimple_code (last) == GIMPLE_RETURN)
	{
	  edge e1;
	  edge_iterator ei1;

	  if (single_succ_p (bb))
	    {
	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }
	  else
	    if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		&& !predicted_by_p (e->src, PRED_CONST_RETURN)
		&& !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
	      predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	}

      /* Look for block we are guarding (ie we dominate it,
	 but it doesn't postdominate us).  */
      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
	  && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	  && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	{
	  gimple_stmt_iterator bi;

	  /* The call heuristic claims that a guarded function call
	     is improbable.  This is because such calls are often used
	     to signal exceptional situations such as printing error
	     messages.  */
	  for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
	       gsi_next (&bi))
	    {
	      gimple stmt = gsi_stmt (bi);
	      if (is_gimple_call (stmt)
		  /* Constant and pure calls are hardly used to signal
		     something exceptional.  */
		  && gimple_has_side_effects (stmt))
		{
		  predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		  break;
		}
	    }
	}
    }
  tree_predict_by_opcode (bb);
}
/* Predict branch probabilities and estimate profile of the tree CFG.
   This function can be called from the loop optimizers to recompute
   the profile information.  */

void
tree_estimate_probability (void)
{
  basic_block bb;

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
  create_preheaders (CP_SIMPLE_PREHEADERS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
  tree_bb_level_predictions ();
  record_loop_exits ();

  if (number_of_loops (cfun) > 1)
    predict_loops ();

  FOR_EACH_BB_FN (bb, cfun)
    tree_estimate_probability_bb (bb);

  FOR_EACH_BB_FN (bb, cfun)
    combine_predictions_for_bb (bb);

#ifdef ENABLE_CHECKING
  bb_predictions->traverse<void *, assert_is_empty> (NULL);
#endif
  delete bb_predictions;
  bb_predictions = NULL;

  estimate_bb_frequencies (false);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
}
/* Predict edges to successors of CUR whose sources are not postdominated by
   BB by PRED and recurse to all postdominators.  */

static void
predict_paths_for_bb (basic_block cur, basic_block bb,
		      enum br_predictor pred,
		      enum prediction taken,
		      bitmap visited)
{
  edge e;
  edge_iterator ei;
  basic_block son;

  /* We are looking for all edges forming the edge cut induced by the
     set of all blocks postdominated by BB.  */
  FOR_EACH_EDGE (e, ei, cur->preds)
    if (e->src->index >= NUM_FIXED_BLOCKS
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
      {
	edge e2;
	edge_iterator ei2;
	bool found = false;

	/* Ignore fake edges and eh, we predict them as not taken anyway.  */
	if (e->flags & (EDGE_EH | EDGE_FAKE))
	  continue;
	gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));

	/* See if there is an edge from e->src that is not abnormal
	   and does not lead to BB.  */
	FOR_EACH_EDGE (e2, ei2, e->src->succs)
	  if (e2 != e
	      && !(e2->flags & (EDGE_EH | EDGE_FAKE))
	      && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
	    {
	      found = true;
	      break;
	    }

	/* If there is a non-abnormal path leaving e->src, predict the edge
	   using the predictor.  Otherwise we need to look for paths
	   leading to e->src.

	   The second may lead to an infinite loop in the case we are
	   predicting regions that are only reachable by abnormal edges.
	   We simply prevent visiting a given BB twice.  */
	if (found)
	  predict_edge_def (e, pred, taken);
	else if (bitmap_set_bit (visited, e->src->index))
	  predict_paths_for_bb (e->src, e->src, pred, taken, visited);
      }
  for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    predict_paths_for_bb (son, bb, pred, taken, visited);
}
/* Set branch probabilities on all paths leading to BB according to
   PRED and TAKEN.  */

static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
			  enum prediction taken)
{
  bitmap visited = BITMAP_ALLOC (NULL);
  predict_paths_for_bb (bb, bb, pred, taken, visited);
  BITMAP_FREE (visited);
}
/* Like predict_paths_leading_to but take an edge instead of a basic block.  */

static void
predict_paths_leading_to_edge (edge e, enum br_predictor pred,
			       enum prediction taken)
{
  bool has_nonloop_edge = false;
  edge_iterator ei;
  edge e2;

  basic_block bb = e->src;
  FOR_EACH_EDGE (e2, ei, bb->succs)
    if (e2->dest != e->src && e2->dest != e->dest
	&& !(e->flags & (EDGE_EH | EDGE_FAKE))
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
      {
	has_nonloop_edge = true;
	break;
      }
  if (!has_nonloop_edge)
    {
      bitmap visited = BITMAP_ALLOC (NULL);
      predict_paths_for_bb (bb, bb, pred, taken, visited);
      BITMAP_FREE (visited);
    }
  else
    predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct block_info
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
};

/* Similar information for edges.  */
struct edge_prob_info
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached provided the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
};

#define BLOCK_INFO(B)	((block_info *) (B)->aux)
#define EDGE_INFO(E)	((edge_prob_info *) (E)->aux)
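/* Editor's illustration, not from the original sources: if a latch edge
   ends up with back_edge_prob == 0.9, the loop is expected to iterate
   1 / (1 - 0.9) == 10 times per entry.  The estimate diverges as
   back_edge_prob approaches 1, which is why propagate_freq below clamps
   the accumulated cyclic probability at real_almost_one,
   i.e. 1 - 1/REG_BR_PROB_BASE.  */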
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
      /* When function never returns, we will never process exit block.  */
      if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	bb->count = bb->frequency = 0;
    }

  BLOCK_INFO (head)->frequency = 1;
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability = 0;
      sreal frequency = 0;

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      cyclic_probability += EDGE_INFO (e)->back_edge_prob;
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency
				 / REG_BR_PROB_BASE);  */

		sreal tmp = e->probability;
		tmp *= BLOCK_INFO (e->src)->frequency;
		tmp *= real_inv_br_prob_base;
		frequency += tmp;
	      }

	  if (cyclic_probability == 0)
	    BLOCK_INFO (bb)->frequency = frequency;
	  else
	    {
	      if (cyclic_probability > real_almost_one)
		cyclic_probability = real_almost_one;

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      cyclic_probability = sreal (1) - cyclic_probability;
	      BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal tmp = e->probability;
	  tmp *= BLOCK_INFO (bb)->frequency;
	  EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
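/* Editor's illustration, not from the original sources: for a single-block
   loop entered with frequency 1 whose back edge carries probability 0.75,
   the header accumulates frequency == 1 from the entry edge and
   cyclic_probability == 0.75 from the back edge, so the division above
   gives BLOCK_INFO (header)->frequency = 1 / (1 - 0.75) == 4, matching an
   expected trip count of four.  */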
/* Estimate frequencies in loops at the same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}
/* Propagates frequencies through the structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops (cfun) > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB_FN (bb, cfun)
    bitmap_set_bit (tovisit, bb->index);
  propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
  BITMAP_FREE (tovisit);
}
/* Drop the profile for NODE to guessed, and update its frequency based on
   whether it is expected to be hot given the CALL_COUNT.  */

static void
drop_profile (struct cgraph_node *node, gcov_type call_count)
{
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
  /* In the case where this was called by another function with a
     dropped profile, call_count will be 0.  Since there are no
     non-zero call counts to this function, we don't know for sure
     whether it is hot, and therefore it will be marked normal below.  */
  bool hot = maybe_hot_count_p (NULL, call_count);

  if (dump_file)
    fprintf (dump_file,
	     "Dropping 0 profile for %s/%i. %s based on calls.\n",
	     node->name (), node->order,
	     hot ? "Function is hot" : "Function is normal");
  /* We only expect to miss profiles for functions that are reached
     via non-zero call edges in cases where the function may have
     been linked from another module or library (COMDATs and extern
     templates).  See the comments below for handle_missing_profiles.
     Also, only warn in cases where the missing counts exceed the
     number of training runs.  In certain cases with an execv followed
     by a no-return call the profile for the no-return call is not
     dumped and there can be a mismatch.  */
  if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
      && call_count > profile_info->runs)
    {
      if (flag_profile_correction)
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "Missing counts for called function %s/%i\n",
		     node->name (), node->order);
	}
      else
	warning (0, "Missing counts for called function %s/%i",
		 node->name (), node->order);
    }

  profile_status_for_fn (fn)
    = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
  node->frequency
    = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
}
/* In the case of COMDAT routines, multiple object files will contain the same
   function and the linker will select one for the binary.  In that case
   all the other copies from the profile-instrumented binary will be missing
   profile counts.  Look for cases where this happened, due to non-zero
   call counts going to 0-count functions, and drop the profile to guessed
   so that we can use the estimated probabilities and avoid optimizing only
   for size.

   The other case where the profile may be missing is when the routine
   is not going to be emitted to the object file, e.g. for "extern template"
   class methods.  Those will be marked DECL_EXTERNAL.  Emit a warning in
   all other cases of non-zero calls to 0-count functions.  */

void
handle_missing_profiles (void)
{
  struct cgraph_node *node;
  int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
  vec<struct cgraph_node *> worklist;
  worklist.create (64);

  /* See if a 0-count function has non-0 count callers.  In this case we
     lost some profile.  Drop its function profile to PROFILE_GUESSED.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct cgraph_edge *e;
      gcov_type call_count = 0;
      gcov_type max_tp_first_run = 0;
      struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

      if (node->count)
	continue;
      for (e = node->callers; e; e = e->next_caller)
	{
	  call_count += e->count;

	  if (e->caller->tp_first_run > max_tp_first_run)
	    max_tp_first_run = e->caller->tp_first_run;
	}

      /* If the time profile is missing, assign the maximum that comes from
	 the caller functions.  */
      if (!node->tp_first_run && max_tp_first_run)
	node->tp_first_run = max_tp_first_run + 1;

      if (call_count
	  && fn && fn->cfg
	  && (call_count * unlikely_count_fraction >= profile_info->runs))
	{
	  drop_profile (node, call_count);
	  worklist.safe_push (node);
	}
    }

  /* Propagate the profile dropping to other 0-count COMDATs that are
     potentially called by COMDATs we already dropped the profile on.  */
  while (worklist.length () > 0)
    {
      struct cgraph_edge *e;

      node = worklist.pop ();
      for (e = node->callees; e; e = e->next_callee)
	{
	  struct cgraph_node *callee = e->callee;
	  struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);

	  if (callee->count > 0)
	    continue;
	  if (DECL_COMDAT (callee->decl) && fn && fn->cfg
	      && profile_status_for_fn (fn) == PROFILE_READ)
	    {
	      drop_profile (node, 0);
	      worklist.safe_push (callee);
	    }
	}
    }
  worklist.release ();
}
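/* Editor's illustration, not from the original sources (names are
   hypothetical): a header-defined inline function such as

     // util.h
     inline int clamp (int v) { return v < 0 ? 0 : v; }

   is emitted as a COMDAT in every object file that uses it.  The linker
   keeps a single instrumented copy, so the discarded copies read back
   zero counts even when their callers have non-zero counts; the walk
   above detects exactly this mismatch and drops such profiles to
   guessed.  */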
/* Convert counts measured by profile-driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  /* Don't overwrite the estimated frequencies when the profile for
     the function is missing.  We may drop this function to PROFILE_GUESSED
     later in drop_profile ().  */
  if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    return 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
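/* Editor's illustration, not from the original sources: with
   BB_FREQ_MAX == 10000, a block with count 500 in a function whose hottest
   block has count_max == 1000 gets
   frequency = (500 * 10000 + 500) / 1000 == 5000; counts are rescaled so
   the hottest block lands at BB_FREQ_MAX and the "+ count_max / 2" term
   rounds to nearest.  */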
/* Return true if the function is likely to be expensive, so there is no
   point in optimizing performance of the prologue or epilogue, or doing
   inlining at the expense of code size growth.  THRESHOLD is the limit of
   the number of instructions the function can execute on average to still
   be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute this accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      FOR_BB_INSNS (bb, insn)
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
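/* Editor's illustration, not from the original sources: the sum above adds
   bb->frequency once per active insn, so it measures the expected number
   of executed insns scaled by the entry frequency.  With an entry
   frequency of 1000 and THRESHOLD == 20, limit == 20000 and the function
   is flagged as expensive as soon as the scaled total shows more than 20
   insns executed per invocation on average.  */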
/* Estimate and propagate basic block frequencies using the given branch
   probabilities.  If FORCE is true, the frequencies are used to estimate
   the counts even when there are already non-zero profile counts.  */

void
estimate_bb_frequencies (bool force)
{
  basic_block bb;
  sreal freq_max;

  if (force || profile_status_for_fn (cfun) != PROFILE_READ
      || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  real_br_prob_base = REG_BR_PROB_BASE;
	  real_bb_freq_max = BB_FREQ_MAX;
	  real_one_half = sreal (1, -1);
	  real_inv_br_prob_base = sreal (1) / real_br_prob_base;
	  real_almost_one = sreal (1) - real_inv_br_prob_base;
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
	 REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (block_info));
      alloc_aux_for_edges (sizeof (edge_prob_info));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      EDGE_INFO (e)->back_edge_prob = e->probability;
	      EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
	    }
	}

      /* First compute frequencies locally for each loop from innermost
	 to outermost to examine frequencies for back edges.  */
      estimate_loops ();

      freq_max = 0;
      FOR_EACH_BB_FN (bb, cfun)
	if (freq_max < BLOCK_INFO (bb)->frequency)
	  freq_max = BLOCK_INFO (bb)->frequency;

      freq_max = real_bb_freq_max / freq_max;
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
	  bb->frequency = tmp.to_int ();
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (profile_status_for_fn (cfun) != PROFILE_READ)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot",
				 DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
	       || DECL_STATIC_DESTRUCTOR (current_function_decl))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }

  /* Only the first time try to drop the function into unlikely executed.
     After inlining the roundoff errors may confuse us.
     The ipa-profile pass will drop functions only called from unlikely
     functions to unlikely, and that is most of what we care about.  */
  if (!cfun->after_inlining)
    node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (maybe_hot_bb_p (cfun, bb))
	{
	  node->frequency = NODE_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (cfun, bb))
	node->frequency = NODE_FREQUENCY_NORMAL;
    }
}
/* Build PREDICT_EXPR.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
		   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
/* Predict branch probabilities and estimate profile of the tree CFG.  */

namespace {

const pass_data pass_data_profile =
{
  GIMPLE_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_profile : public gimple_opt_pass
{
public:
  pass_profile (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_profile, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_guess_branch_prob; }
  virtual unsigned int execute (function *);

}; // class pass_profile

unsigned int
pass_profile::execute (function *fun)
{
  unsigned nb_loops;

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
    return 0;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (fun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability ();

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    profile_status_for_fn (fun) = PROFILE_GUESSED;
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_profile (gcc::context *ctxt)
{
  return new pass_profile (ctxt);
}
namespace {

const pass_data pass_data_strip_predict_hints =
{
  GIMPLE_PASS, /* type */
  "*strip_predict_hints", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strip_predict_hints : public gimple_opt_pass
{
public:
  pass_strip_predict_hints (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_strip_predict_hints

/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */

unsigned int
pass_strip_predict_hints::execute (function *fun)
{
  basic_block bb;
  gimple ass_stmt;
  tree var;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
	{
	  gimple stmt = gsi_stmt (bi);

	  if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      gsi_remove (&bi, true);
	      continue;
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      if ((fndecl
		   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
		   && gimple_call_num_args (stmt) == 2)
		  || (gimple_call_internal_p (stmt)
		      && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
		{
		  var = gimple_call_lhs (stmt);
		  if (var)
		    {
		      ass_stmt
			= gimple_build_assign (var, gimple_call_arg (stmt, 0));
		      gsi_replace (&bi, ass_stmt, true);
		    }
		  else
		    {
		      gsi_remove (&bi, true);
		      continue;
		    }
		}
	    }
	  gsi_next (&bi);
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strip_predict_hints (gcc::context *ctxt)
{
  return new pass_strip_predict_hints (ctxt);
}
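/* Editor's illustration, not from the original sources: after this pass, a
   hint such as

     if (__builtin_expect (ptr != NULL, 1))
       ...

   has its __builtin_expect call replaced by the call's first argument, so
   it behaves as a plain

     if (ptr != NULL)
       ...

   the prediction it carried having already been consumed when the profile
   was estimated.  */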
/* Rebuild function frequencies.  Passes are in general expected to
   maintain the profile by hand; however, in some cases this is not
   possible: for example, when inlining several functions with loops,
   frequencies might run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);

  /* When the max bb count in the function is small, there is a higher
     chance that there were truncation errors in the integer scaling
     of counts by inlining and other optimizations.  This could lead
     to incorrect classification of code as being cold when it isn't.
     In that case, force the estimation of bb counts/frequencies from the
     branch probabilities, rather than computing frequencies from counts,
     which may also lead to frequencies incorrectly reduced to 0.  There
     is less precision in the probabilities, so we only do this for small
     max counts.  */
  gcov_type count_max = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    count_max = MAX (bb->count, count_max);

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED
      || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
	  && count_max < REG_BR_PROB_BASE / 10))
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies (true);
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status_for_fn (cfun) == PROFILE_READ)
    counts_to_freqs ();
  else
    gcc_unreachable ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}