/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
/* Enum with reasons why a predictor is ignored.  */

enum predictor_reason
{
  REASON_NONE,
  REASON_IGNORED,
  REASON_SINGLE_EDGE_DUPLICATE,
  REASON_EDGE_PAIR_DUPLICATE
};

/* String messages for the aforementioned enum.  */

static const char *reason_messages[] = {"", " (ignored)",
    " (single edge duplicate)", " (edge pair duplicate)"};
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block,
			     enum predictor_reason, edge);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction,
				      struct loop *in_loop = NULL);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction,
					   struct loop *in_loop = NULL);
static bool can_predict_insn_p (const rtx_insn *);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
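
/* For illustration: with the usual REG_BR_PROB_BASE of 10000, HITRATE (75)
   yields 7500, i.e. a predictor that is expected to be right 75% of the
   time is represented as a branch probability of 7500/10000.  (Sketch only;
   the exact REG_BR_PROB_BASE value comes from the RTL definitions.)  */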
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
  struct cgraph_node *node = cgraph_node::get (fun->decl);
  if (!profile_info
      || !opt_for_fn (fun->decl, flag_branch_probabilities))
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
    return false;
  if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
    return false;
  if (freq * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)
      < ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
    return false;
  return true;
}
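
/* Worked example (assuming the default --param hot-bb-frequency-fraction
   of 1000): a block whose estimated frequency is below 1/1000 of the entry
   block's frequency fails the last test above and is not considered hot.  */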
static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  gcov_working_set_t *ws;
  if (min_count == -1)
    {
      ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE));
      gcc_assert (ws);
      min_count = ws->min_counter;
    }
  return min_count;
}

/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}
/* Return TRUE if profile count COUNT is considered to be hot.  */

static inline bool
maybe_hot_count_p (struct function *fun, gcov_type count)
{
  if (fun && profile_status_for_fn (fun) != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count >= get_hot_bb_threshold ());
}
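
/* Illustration: the threshold returned by get_hot_bb_threshold () is the
   minimal counter of the gcov working set selected by
   --param hot-bb-count-ws-permille, i.e. roughly the smallest count that is
   still needed to cover that per-mille fraction of all profiled events.
   A block counted fewer times than the number of training runs is never
   considered hot.  */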
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    return maybe_hot_count_p (fun, bb->count);
  return maybe_hot_frequency_p (fun, bb->frequency);
}

/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_READ)
    return maybe_hot_count_p (cfun, e->count);
  return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
}
/* Return true if profile COUNT and FREQUENCY, or function FUN static
   node frequency, reflect never being executed.  */

static bool
probably_never_executed (struct function *fun,
			 gcov_type count, int frequency)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    {
      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
      if (count * unlikely_count_fraction >= profile_info->runs)
	return false;
      if (!frequency)
	return true;
      if (!ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
	return false;
      if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
	{
	  gcov_type computed_count;
	  /* Check for possibility of overflow, in which case entry bb count
	     is large enough to do the division first without losing much
	     precision.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count < REG_BR_PROB_BASE *
	      REG_BR_PROB_BASE)
	    {
	      gcov_type scaled_count
		= frequency * ENTRY_BLOCK_PTR_FOR_FN (fun)->count *
		  unlikely_count_fraction;
	      computed_count = RDIV (scaled_count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	    }
	  else
	    {
	      computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (fun)->count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	      computed_count *= frequency * unlikely_count_fraction;
	    }
	  if (computed_count >= profile_info->runs)
	    return false;
	}
      return true;
    }
  if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
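
/* Illustration (assuming the default --param unlikely-bb-count-fraction
   of 20): with profile feedback a count is treated as "never executed"
   only when count * 20 is still smaller than the number of training runs,
   i.e. the block ran in fewer than one out of twenty runs.  */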
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count, bb->frequency);
}

/* Return true in case edge E is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}
/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return optimize_size;
  cgraph_node *n = cgraph_node::get (fun->decl);
  return n && n->optimize_for_size_p ();
}

/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}
/* Return the optimization type that should be used for the function FUN.  */

optimization_type
function_optimization_type (struct function *fun)
{
  return (optimize_function_for_speed_p (fun)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}

/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return (optimize_function_for_size_p (cfun)
	  || (bb && !maybe_hot_bb_p (cfun, bb)));
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return the optimization type that should be used for block BB.  */

optimization_type
bb_optimization_type (const_basic_block bb)
{
  return (optimize_bb_for_speed_p (bb)
	  ? OPTIMIZE_FOR_SPEED
	  : OPTIMIZE_FOR_SIZE);
}
/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the current insn should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the current insn should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}
/* Return true when edge E is likely to be well predictable by branch
   prediction.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
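
/* Illustration (assuming the default --param predictable-branch-outcome
   of 2): edges whose probability is at most 2% or at least 98% of
   REG_BR_PROB_BASE are considered well predictable by the hardware branch
   predictor.  */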
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}
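
/* Typical use (sketch): a pass expanding code for a particular block calls
   rtl_profile_for_bb (bb) first so that optimize_insn_for_*_p () reflect
   that block's hotness, and calls default_rtl_profile () when done.  */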
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* Structure representing predictions in tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR for edge E predicted as TAKEN.  */

bool
edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken)
{
  struct edge_prediction *i;
  basic_block bb = e->src;
  edge_prediction **preds = bb_predictions->get (bb);
  if (!preds)
    return false;

  int probability = predictor_info[(int) predictor].hitrate;

  if (!taken)
    probability = REG_BR_PROB_BASE - probability;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor
	&& i->ep_edge == e
	&& i->ep_probability == probability)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which it gets right slightly over 75% of the time.
   It is, however, notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number of iterations heuristics are predicted well.
   This macro should be able to distinguish those, but at the moment it only
   checks for the noreturn heuristic, which is the only one giving a
   probability over 99% or below 1%.  In the future we might want to
   propagate reliability information across the CFG if we find it useful in
   multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status_for_fn (cfun) == PROFILE_READ
	  || (profile_status_for_fn (cfun) == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
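
/* Illustration: with REG_BR_PROB_BASE of 10000, HITRATE (1) is 100 and
   HITRATE (99) is 9900, so a guessed profile is trusted here only for
   probabilities of at most 1% or at least 99% - in practice the noreturn
   heuristic mentioned above.  */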
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (XINT (note, 0));
}
static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx_insn *last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && EDGE_COUNT (e->src->succs) > 1
      && flag_guess_branch_prob
      && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Filter edge predictions PREDS by a function FILTER.  DATA are passed
   to the filter function.  */

static void
filter_predictions (edge_prediction **preds,
		    bool (*filter) (edge_prediction *, void *), void *data)
{
  if (!bb_predictions)
    return;

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*filter) (*prediction, data))
	    prediction = &((*prediction)->ep_next);
	  else
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	}
    }
}
/* Filter function predicate that returns true for an edge prediction P
   if its edge is equal to DATA.  */

static bool
equal_edge_p (edge_prediction *p, void *data)
{
  return p->ep_edge == (edge) data;
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);
  filter_predictions (preds, equal_edge_p, e);
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, enum predictor_reason reason = REASON_NONE,
		 edge ep_edge = NULL)
{
  edge e = ep_edge;
  edge_iterator ei;

  if (!file)
    return;

  if (e == NULL)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (! (e->flags & EDGE_FALLTHRU))
	break;

  char edge_info_str[128];
  if (ep_edge)
    sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index,
	     ep_edge->dest->index);
  else
    edge_info_str[0] = '\0';

  fprintf (file, "  %s heuristics%s%s: %.1f%%",
	   predictor_info[predictor].name,
	   edge_info_str, reason_messages[reason],
	   probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec %" PRId64, bb->count);
      if (e)
	{
	  fprintf (file, " hit %" PRId64, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  If UNLIKELY_EDGES is not null, distribute
   even probability for all edges not mentioned in the set.  These edges
   are given PROB_VERY_UNLIKELY probability.  */

static void
set_even_probabilities (basic_block bb,
			hash_set<edge> *unlikely_edges = NULL)
{
  unsigned nedges = 0;
  edge e = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;

  /* Make the distribution even if all edges are unlikely.  */
  unsigned unlikely_count = unlikely_edges ? unlikely_edges->elements () : 0;
  if (unlikely_count == nedges)
    {
      unlikely_edges = NULL;
      unlikely_count = 0;
    }

  unsigned c = nedges - unlikely_count;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	if (unlikely_edges != NULL && unlikely_edges->contains (e))
	  e->probability = PROB_VERY_UNLIKELY;
	else
	  e->probability = (REG_BR_PROB_BASE + c / 2) / c;
      }
    else
      e->probability = 0;
}
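
/* Illustration: for a block with three ordinary successors and no unlikely
   edges, c is 3 and each edge receives (REG_BR_PROB_BASE + 1) / 3, i.e.
   3333 out of the usual 10000; unlikely edges instead keep
   PROB_VERY_UNLIKELY and only the remaining edges share the even split.  */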
826 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
827 note if not already present. Remove now useless REG_BR_PRED notes. */
830 combine_predictions_for_insn (rtx_insn
*insn
, basic_block bb
)
835 int best_probability
= PROB_EVEN
;
836 enum br_predictor best_predictor
= END_PREDICTORS
;
837 int combined_probability
= REG_BR_PROB_BASE
/ 2;
839 bool first_match
= false;
842 if (!can_predict_insn_p (insn
))
844 set_even_probabilities (bb
);
848 prob_note
= find_reg_note (insn
, REG_BR_PROB
, 0);
849 pnote
= ®_NOTES (insn
);
851 fprintf (dump_file
, "Predictions for insn %i bb %i\n", INSN_UID (insn
),
854 /* We implement "first match" heuristics and use probability guessed
855 by predictor with smallest index. */
856 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
857 if (REG_NOTE_KIND (note
) == REG_BR_PRED
)
859 enum br_predictor predictor
= ((enum br_predictor
)
860 INTVAL (XEXP (XEXP (note
, 0), 0)));
861 int probability
= INTVAL (XEXP (XEXP (note
, 0), 1));
864 if (best_predictor
> predictor
865 && predictor_info
[predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
866 best_probability
= probability
, best_predictor
= predictor
;
868 d
= (combined_probability
* probability
869 + (REG_BR_PROB_BASE
- combined_probability
)
870 * (REG_BR_PROB_BASE
- probability
));
872 /* Use FP math to avoid overflows of 32bit integers. */
874 /* If one probability is 0% and one 100%, avoid division by zero. */
875 combined_probability
= REG_BR_PROB_BASE
/ 2;
877 combined_probability
= (((double) combined_probability
) * probability
878 * REG_BR_PROB_BASE
/ d
+ 0.5);
881 /* Decide which heuristic to use. In case we didn't match anything,
882 use no_prediction heuristic, in case we did match, use either
first match or Dempster-Shafer theory depending on the flags.  */
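/* Sketch of the Dempster-Shafer combination applied in the loop above:
   successive predictions p are folded into the running value c as
   c' = c*p*B / (c*p + (B-c)*(B-p)) with B = REG_BR_PROB_BASE.  E.g.
   combining two independent 70% predictions (7000 and 7000 out of 10000)
   gives roughly 8448, i.e. agreement makes the combined prediction
   stronger than either one alone.  */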
885 if (best_predictor
!= END_PREDICTORS
)
889 dump_prediction (dump_file
, PRED_NO_PREDICTION
,
890 combined_probability
, bb
);
894 dump_prediction (dump_file
, PRED_DS_THEORY
, combined_probability
,
895 bb
, !first_match
? REASON_NONE
: REASON_IGNORED
);
897 dump_prediction (dump_file
, PRED_FIRST_MATCH
, best_probability
,
898 bb
, first_match
? REASON_NONE
: REASON_IGNORED
);
902 combined_probability
= best_probability
;
903 dump_prediction (dump_file
, PRED_COMBINED
, combined_probability
, bb
);
907 if (REG_NOTE_KIND (*pnote
) == REG_BR_PRED
)
909 enum br_predictor predictor
= ((enum br_predictor
)
910 INTVAL (XEXP (XEXP (*pnote
, 0), 0)));
911 int probability
= INTVAL (XEXP (XEXP (*pnote
, 0), 1));
913 dump_prediction (dump_file
, predictor
, probability
, bb
,
914 (!first_match
|| best_predictor
== predictor
)
915 ? REASON_NONE
: REASON_IGNORED
);
916 *pnote
= XEXP (*pnote
, 1);
919 pnote
= &XEXP (*pnote
, 1);
924 add_int_reg_note (insn
, REG_BR_PROB
, combined_probability
);
926 /* Save the prediction into CFG in case we are seeing non-degenerated
928 if (!single_succ_p (bb
))
930 BRANCH_EDGE (bb
)->probability
= combined_probability
;
931 FALLTHRU_EDGE (bb
)->probability
932 = REG_BR_PROB_BASE
- combined_probability
;
935 else if (!single_succ_p (bb
))
937 int prob
= XINT (prob_note
, 0);
939 BRANCH_EDGE (bb
)->probability
= prob
;
940 FALLTHRU_EDGE (bb
)->probability
= REG_BR_PROB_BASE
- prob
;
943 single_succ_edge (bb
)->probability
= REG_BR_PROB_BASE
;
946 /* Edge prediction hash traits. */
948 struct predictor_hash
: pointer_hash
<edge_prediction
>
951 static inline hashval_t
hash (const edge_prediction
*);
952 static inline bool equal (const edge_prediction
*, const edge_prediction
*);
955 /* Calculate hash value of an edge prediction P based on predictor and
956 normalized probability. */
959 predictor_hash::hash (const edge_prediction
*p
)
961 inchash::hash hstate
;
962 hstate
.add_int (p
->ep_predictor
);
964 int prob
= p
->ep_probability
;
965 if (prob
> REG_BR_PROB_BASE
/ 2)
966 prob
= REG_BR_PROB_BASE
- prob
;
968 hstate
.add_int (prob
);
970 return hstate
.end ();
973 /* Return true whether edge predictions P1 and P2 use the same predictor and
974 have equal (or opposed probability). */
977 predictor_hash::equal (const edge_prediction
*p1
, const edge_prediction
*p2
)
979 return (p1
->ep_predictor
== p2
->ep_predictor
980 && (p1
->ep_probability
== p2
->ep_probability
981 || p1
->ep_probability
== REG_BR_PROB_BASE
- p2
->ep_probability
));
984 struct predictor_hash_traits
: predictor_hash
,
985 typed_noop_remove
<edge_prediction
*> {};
987 /* Return true if edge prediction P is not in DATA hash set. */
990 not_removed_prediction_p (edge_prediction
*p
, void *data
)
992 hash_set
<edge_prediction
*> *remove
= (hash_set
<edge_prediction
*> *) data
;
993 return !remove
->contains (p
);
996 /* Prune predictions for a basic block BB. Currently we do following
999 1) remove duplicate prediction that is guessed with the same probability
1000 (different than 1/2) to both edge
1001 2) remove duplicates for a prediction that belongs with the same probability
1007 prune_predictions_for_bb (basic_block bb
)
1009 edge_prediction
**preds
= bb_predictions
->get (bb
);
1013 hash_table
<predictor_hash_traits
> s (13);
1014 hash_set
<edge_prediction
*> remove
;
1016 /* Step 1: identify predictors that should be removed. */
1017 for (edge_prediction
*pred
= *preds
; pred
; pred
= pred
->ep_next
)
1019 edge_prediction
*existing
= s
.find (pred
);
1022 if (pred
->ep_edge
== existing
->ep_edge
1023 && pred
->ep_probability
== existing
->ep_probability
)
1025 /* Remove a duplicate predictor. */
1026 dump_prediction (dump_file
, pred
->ep_predictor
,
1027 pred
->ep_probability
, bb
,
1028 REASON_SINGLE_EDGE_DUPLICATE
, pred
->ep_edge
);
1032 else if (pred
->ep_edge
!= existing
->ep_edge
1033 && pred
->ep_probability
== existing
->ep_probability
1034 && pred
->ep_probability
!= REG_BR_PROB_BASE
/ 2)
1036 /* Remove both predictors as they predict the same
1038 dump_prediction (dump_file
, existing
->ep_predictor
,
1039 pred
->ep_probability
, bb
,
1040 REASON_EDGE_PAIR_DUPLICATE
,
1042 dump_prediction (dump_file
, pred
->ep_predictor
,
1043 pred
->ep_probability
, bb
,
1044 REASON_EDGE_PAIR_DUPLICATE
,
1047 remove
.add (existing
);
1052 edge_prediction
**slot2
= s
.find_slot (pred
, INSERT
);
1056 /* Step 2: Remove predictors. */
1057 filter_predictions (preds
, not_removed_prediction_p
, &remove
);
1061 /* Combine predictions into single probability and store them into CFG.
1062 Remove now useless prediction entries.
1063 If DRY_RUN is set, only produce dumps and do not modify profile. */
1066 combine_predictions_for_bb (basic_block bb
, bool dry_run
)
1068 int best_probability
= PROB_EVEN
;
1069 enum br_predictor best_predictor
= END_PREDICTORS
;
1070 int combined_probability
= REG_BR_PROB_BASE
/ 2;
1072 bool first_match
= false;
1074 struct edge_prediction
*pred
;
1076 edge e
, first
= NULL
, second
= NULL
;
1079 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1080 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
1083 if (first
&& !second
)
1089 /* When there is no successor or only one choice, prediction is easy.
1091 When we have a basic block with more than 2 successors, the situation
1092 is more complicated as DS theory cannot be used literally.
1093 More precisely, let's assume we predicted edge e1 with probability p1,
1094 thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we
1095 need to find probability of e.g. m1({b2}), which we don't know.
1096 The only approximation is to equally distribute 1-p1 to all edges
According to numbers we've got from the SPEC2006 benchmark, there's only
1100 one interesting reliable predictor (noreturn call), which can be
1101 handled with a bit easier approach. */
1104 hash_set
<edge
> unlikely_edges (4);
1106 /* Identify all edges that have a probability close to very unlikely.
1107 Doing the approach for very unlikely doesn't worth for doing as
1108 there's no such probability in SPEC2006 benchmark. */
1109 edge_prediction
**preds
= bb_predictions
->get (bb
);
1111 for (pred
= *preds
; pred
; pred
= pred
->ep_next
)
1112 if (pred
->ep_probability
<= PROB_VERY_UNLIKELY
)
1113 unlikely_edges
.add (pred
->ep_edge
);
1115 if (!bb
->count
&& !dry_run
)
1116 set_even_probabilities (bb
, &unlikely_edges
);
1117 clear_bb_predictions (bb
);
1120 fprintf (dump_file
, "Predictions for bb %i\n", bb
->index
);
1121 if (unlikely_edges
.elements () == 0)
1123 "%i edges in bb %i predicted to even probabilities\n",
1128 "%i edges in bb %i predicted with some unlikely edges\n",
1130 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1131 if (!(e
->flags
& (EDGE_EH
| EDGE_FAKE
)))
1132 dump_prediction (dump_file
, PRED_COMBINED
, e
->probability
,
1133 bb
, REASON_NONE
, e
);
1140 fprintf (dump_file
, "Predictions for bb %i\n", bb
->index
);
1142 prune_predictions_for_bb (bb
);
1144 edge_prediction
**preds
= bb_predictions
->get (bb
);
1148 /* We implement "first match" heuristics and use probability guessed
1149 by predictor with smallest index. */
1150 for (pred
= *preds
; pred
; pred
= pred
->ep_next
)
1152 enum br_predictor predictor
= pred
->ep_predictor
;
1153 int probability
= pred
->ep_probability
;
1155 if (pred
->ep_edge
!= first
)
1156 probability
= REG_BR_PROB_BASE
- probability
;
/* First match heuristics would be wildly confused if we predicted
   both directions.  */
1161 if (best_predictor
> predictor
1162 && predictor_info
[predictor
].flags
& PRED_FLAG_FIRST_MATCH
)
1164 struct edge_prediction
*pred2
;
1165 int prob
= probability
;
1167 for (pred2
= (struct edge_prediction
*) *preds
;
1168 pred2
; pred2
= pred2
->ep_next
)
1169 if (pred2
!= pred
&& pred2
->ep_predictor
== pred
->ep_predictor
)
1171 int probability2
= pred2
->ep_probability
;
1173 if (pred2
->ep_edge
!= first
)
1174 probability2
= REG_BR_PROB_BASE
- probability2
;
1176 if ((probability
< REG_BR_PROB_BASE
/ 2) !=
1177 (probability2
< REG_BR_PROB_BASE
/ 2))
1180 /* If the same predictor later gave better result, go for it! */
1181 if ((probability
>= REG_BR_PROB_BASE
/ 2 && (probability2
> probability
))
1182 || (probability
<= REG_BR_PROB_BASE
/ 2 && (probability2
< probability
)))
1183 prob
= probability2
;
1186 best_probability
= prob
, best_predictor
= predictor
;
1189 d
= (combined_probability
* probability
1190 + (REG_BR_PROB_BASE
- combined_probability
)
1191 * (REG_BR_PROB_BASE
- probability
));
1193 /* Use FP math to avoid overflows of 32bit integers. */
1195 /* If one probability is 0% and one 100%, avoid division by zero. */
1196 combined_probability
= REG_BR_PROB_BASE
/ 2;
1198 combined_probability
= (((double) combined_probability
)
1200 * REG_BR_PROB_BASE
/ d
+ 0.5);
1204 /* Decide which heuristic to use. In case we didn't match anything,
1205 use no_prediction heuristic, in case we did match, use either
first match or Dempster-Shafer theory depending on the flags.  */
1208 if (best_predictor
!= END_PREDICTORS
)
1212 dump_prediction (dump_file
, PRED_NO_PREDICTION
, combined_probability
, bb
);
1216 dump_prediction (dump_file
, PRED_DS_THEORY
, combined_probability
, bb
,
1217 !first_match
? REASON_NONE
: REASON_IGNORED
);
1219 dump_prediction (dump_file
, PRED_FIRST_MATCH
, best_probability
, bb
,
1220 first_match
? REASON_NONE
: REASON_IGNORED
);
1224 combined_probability
= best_probability
;
1225 dump_prediction (dump_file
, PRED_COMBINED
, combined_probability
, bb
);
1229 for (pred
= (struct edge_prediction
*) *preds
; pred
; pred
= pred
->ep_next
)
1231 enum br_predictor predictor
= pred
->ep_predictor
;
1232 int probability
= pred
->ep_probability
;
1234 dump_prediction (dump_file
, predictor
, probability
, bb
,
1235 (!first_match
|| best_predictor
== predictor
)
1236 ? REASON_NONE
: REASON_IGNORED
, pred
->ep_edge
);
1239 clear_bb_predictions (bb
);
1241 if (!bb
->count
&& !dry_run
)
1243 first
->probability
= combined_probability
;
1244 second
->probability
= REG_BR_PROB_BASE
- combined_probability
;
1248 /* Check if T1 and T2 satisfy the IV_COMPARE condition.
1249 Return the SSA_NAME if the condition satisfies, NULL otherwise.
1251 T1 and T2 should be one of the following cases:
1252 1. T1 is SSA_NAME, T2 is NULL
1253 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
1254 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
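
/* For example (sketch): for t1 = i_5 and t2 = 3 the function below returns
   i_5, while for t1 = i_5 and t2 = 100 (constant too large), or for two
   SSA_NAMEs, it returns NULL.  */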
1257 strips_small_constant (tree t1
, tree t2
)
1264 else if (TREE_CODE (t1
) == SSA_NAME
)
1266 else if (tree_fits_shwi_p (t1
))
1267 value
= tree_to_shwi (t1
);
1273 else if (tree_fits_shwi_p (t2
))
1274 value
= tree_to_shwi (t2
);
1275 else if (TREE_CODE (t2
) == SSA_NAME
)
1283 if (value
<= 4 && value
>= -4)
1289 /* Return the SSA_NAME in T or T's operands.
1290 Return NULL if SSA_NAME cannot be found. */
1293 get_base_value (tree t
)
1295 if (TREE_CODE (t
) == SSA_NAME
)
1298 if (!BINARY_CLASS_P (t
))
1301 switch (TREE_OPERAND_LENGTH (t
))
1304 return strips_small_constant (TREE_OPERAND (t
, 0), NULL
);
1306 return strips_small_constant (TREE_OPERAND (t
, 0),
1307 TREE_OPERAND (t
, 1));
1313 /* Check the compare STMT in LOOP. If it compares an induction
1314 variable to a loop invariant, return true, and save
1315 LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
Otherwise return false and set LOOP_INVARIANT to NULL.  */
1319 is_comparison_with_loop_invariant_p (gcond
*stmt
, struct loop
*loop
,
1320 tree
*loop_invariant
,
1321 enum tree_code
*compare_code
,
1325 tree op0
, op1
, bound
, base
;
1327 enum tree_code code
;
1330 code
= gimple_cond_code (stmt
);
1331 *loop_invariant
= NULL
;
1347 op0
= gimple_cond_lhs (stmt
);
1348 op1
= gimple_cond_rhs (stmt
);
1350 if ((TREE_CODE (op0
) != SSA_NAME
&& TREE_CODE (op0
) != INTEGER_CST
)
1351 || (TREE_CODE (op1
) != SSA_NAME
&& TREE_CODE (op1
) != INTEGER_CST
))
1353 if (!simple_iv (loop
, loop_containing_stmt (stmt
), op0
, &iv0
, true))
1355 if (!simple_iv (loop
, loop_containing_stmt (stmt
), op1
, &iv1
, true))
1357 if (TREE_CODE (iv0
.step
) != INTEGER_CST
1358 || TREE_CODE (iv1
.step
) != INTEGER_CST
)
1360 if ((integer_zerop (iv0
.step
) && integer_zerop (iv1
.step
))
1361 || (!integer_zerop (iv0
.step
) && !integer_zerop (iv1
.step
)))
1364 if (integer_zerop (iv0
.step
))
1366 if (code
!= NE_EXPR
&& code
!= EQ_EXPR
)
1367 code
= invert_tree_comparison (code
, false);
1370 if (tree_fits_shwi_p (iv1
.step
))
1379 if (tree_fits_shwi_p (iv0
.step
))
1385 if (TREE_CODE (bound
) != INTEGER_CST
)
1386 bound
= get_base_value (bound
);
1389 if (TREE_CODE (base
) != INTEGER_CST
)
1390 base
= get_base_value (base
);
1394 *loop_invariant
= bound
;
1395 *compare_code
= code
;
1397 *loop_iv_base
= base
;
1401 /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
1404 expr_coherent_p (tree t1
, tree t2
)
1407 tree ssa_name_1
= NULL
;
1408 tree ssa_name_2
= NULL
;
1410 gcc_assert (TREE_CODE (t1
) == SSA_NAME
|| TREE_CODE (t1
) == INTEGER_CST
);
1411 gcc_assert (TREE_CODE (t2
) == SSA_NAME
|| TREE_CODE (t2
) == INTEGER_CST
);
1416 if (TREE_CODE (t1
) == INTEGER_CST
&& TREE_CODE (t2
) == INTEGER_CST
)
1418 if (TREE_CODE (t1
) == INTEGER_CST
|| TREE_CODE (t2
) == INTEGER_CST
)
1421 /* Check to see if t1 is expressed/defined with t2. */
1422 stmt
= SSA_NAME_DEF_STMT (t1
);
1423 gcc_assert (stmt
!= NULL
);
1424 if (is_gimple_assign (stmt
))
1426 ssa_name_1
= SINGLE_SSA_TREE_OPERAND (stmt
, SSA_OP_USE
);
1427 if (ssa_name_1
&& ssa_name_1
== t2
)
1431 /* Check to see if t2 is expressed/defined with t1. */
1432 stmt
= SSA_NAME_DEF_STMT (t2
);
1433 gcc_assert (stmt
!= NULL
);
1434 if (is_gimple_assign (stmt
))
1436 ssa_name_2
= SINGLE_SSA_TREE_OPERAND (stmt
, SSA_OP_USE
);
1437 if (ssa_name_2
&& ssa_name_2
== t1
)
1441 /* Compare if t1 and t2's def_stmts are identical. */
1442 if (ssa_name_2
!= NULL
&& ssa_name_1
== ssa_name_2
)
1448 /* Return true if E is predicted by one of loop heuristics. */
1451 predicted_by_loop_heuristics_p (basic_block bb
)
1453 struct edge_prediction
*i
;
1454 edge_prediction
**preds
= bb_predictions
->get (bb
);
1459 for (i
= *preds
; i
; i
= i
->ep_next
)
1460 if (i
->ep_predictor
== PRED_LOOP_ITERATIONS_GUESSED
1461 || i
->ep_predictor
== PRED_LOOP_ITERATIONS_MAX
1462 || i
->ep_predictor
== PRED_LOOP_ITERATIONS
1463 || i
->ep_predictor
== PRED_LOOP_EXIT
1464 || i
->ep_predictor
== PRED_LOOP_EXIT_WITH_RECURSION
1465 || i
->ep_predictor
== PRED_LOOP_EXTRA_EXIT
)
1470 /* Predict branch probability of BB when BB contains a branch that compares
1471 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1472 loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.
1475 for (int i = 0; i < bound; i++) {
1482 In this loop, we will predict the branch inside the loop to be taken. */
1485 predict_iv_comparison (struct loop
*loop
, basic_block bb
,
1486 tree loop_bound_var
,
1487 tree loop_iv_base_var
,
1488 enum tree_code loop_bound_code
,
1489 int loop_bound_step
)
1492 tree compare_var
, compare_base
;
1493 enum tree_code compare_code
;
1494 tree compare_step_var
;
1498 if (predicted_by_loop_heuristics_p (bb
))
1501 stmt
= last_stmt (bb
);
1502 if (!stmt
|| gimple_code (stmt
) != GIMPLE_COND
)
1504 if (!is_comparison_with_loop_invariant_p (as_a
<gcond
*> (stmt
),
1511 /* Find the taken edge. */
1512 FOR_EACH_EDGE (then_edge
, ei
, bb
->succs
)
1513 if (then_edge
->flags
& EDGE_TRUE_VALUE
)
1516 /* When comparing an IV to a loop invariant, NE is more likely to be
1517 taken while EQ is more likely to be not-taken. */
1518 if (compare_code
== NE_EXPR
)
1520 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1523 else if (compare_code
== EQ_EXPR
)
1525 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, NOT_TAKEN
);
1529 if (!expr_coherent_p (loop_iv_base_var
, compare_base
))
1532 /* If loop bound, base and compare bound are all constants, we can
1533 calculate the probability directly. */
1534 if (tree_fits_shwi_p (loop_bound_var
)
1535 && tree_fits_shwi_p (compare_var
)
1536 && tree_fits_shwi_p (compare_base
))
1539 bool overflow
, overall_overflow
= false;
1540 widest_int compare_count
, tem
;
1542 /* (loop_bound - base) / compare_step */
1543 tem
= wi::sub (wi::to_widest (loop_bound_var
),
1544 wi::to_widest (compare_base
), SIGNED
, &overflow
);
1545 overall_overflow
|= overflow
;
1546 widest_int loop_count
= wi::div_trunc (tem
,
1547 wi::to_widest (compare_step_var
),
1549 overall_overflow
|= overflow
;
1551 if (!wi::neg_p (wi::to_widest (compare_step_var
))
1552 ^ (compare_code
== LT_EXPR
|| compare_code
== LE_EXPR
))
1554 /* (loop_bound - compare_bound) / compare_step */
1555 tem
= wi::sub (wi::to_widest (loop_bound_var
),
1556 wi::to_widest (compare_var
), SIGNED
, &overflow
);
1557 overall_overflow
|= overflow
;
1558 compare_count
= wi::div_trunc (tem
, wi::to_widest (compare_step_var
),
1560 overall_overflow
|= overflow
;
1564 /* (compare_bound - base) / compare_step */
1565 tem
= wi::sub (wi::to_widest (compare_var
),
1566 wi::to_widest (compare_base
), SIGNED
, &overflow
);
1567 overall_overflow
|= overflow
;
1568 compare_count
= wi::div_trunc (tem
, wi::to_widest (compare_step_var
),
1570 overall_overflow
|= overflow
;
1572 if (compare_code
== LE_EXPR
|| compare_code
== GE_EXPR
)
1574 if (loop_bound_code
== LE_EXPR
|| loop_bound_code
== GE_EXPR
)
1576 if (wi::neg_p (compare_count
))
1578 if (wi::neg_p (loop_count
))
1580 if (loop_count
== 0)
1582 else if (wi::cmps (compare_count
, loop_count
) == 1)
1583 probability
= REG_BR_PROB_BASE
;
1586 tem
= compare_count
* REG_BR_PROB_BASE
;
1587 tem
= wi::udiv_trunc (tem
, loop_count
);
1588 probability
= tem
.to_uhwi ();
1591 /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */
1592 if (!overall_overflow
)
1593 predict_edge (then_edge
, PRED_LOOP_IV_COMPARE
, probability
);
1598 if (expr_coherent_p (loop_bound_var
, compare_var
))
1600 if ((loop_bound_code
== LT_EXPR
|| loop_bound_code
== LE_EXPR
)
1601 && (compare_code
== LT_EXPR
|| compare_code
== LE_EXPR
))
1602 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1603 else if ((loop_bound_code
== GT_EXPR
|| loop_bound_code
== GE_EXPR
)
1604 && (compare_code
== GT_EXPR
|| compare_code
== GE_EXPR
))
1605 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1606 else if (loop_bound_code
== NE_EXPR
)
1608 /* If the loop backedge condition is "(i != bound)", we do
1609 the comparison based on the step of IV:
1610 * step < 0 : backedge condition is like (i > bound)
1611 * step > 0 : backedge condition is like (i < bound) */
1612 gcc_assert (loop_bound_step
!= 0);
1613 if (loop_bound_step
> 0
1614 && (compare_code
== LT_EXPR
1615 || compare_code
== LE_EXPR
))
1616 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1617 else if (loop_bound_step
< 0
1618 && (compare_code
== GT_EXPR
1619 || compare_code
== GE_EXPR
))
1620 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1622 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, NOT_TAKEN
);
1625 /* The branch is predicted not-taken if loop_bound_code is
1626 opposite with compare_code. */
1627 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, NOT_TAKEN
);
1629 else if (expr_coherent_p (loop_iv_base_var
, compare_var
))
1632 for (i = s; i < h; i++)
1634 The branch should be predicted taken. */
1635 if (loop_bound_step
> 0
1636 && (compare_code
== GT_EXPR
|| compare_code
== GE_EXPR
))
1637 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1638 else if (loop_bound_step
< 0
1639 && (compare_code
== LT_EXPR
|| compare_code
== LE_EXPR
))
1640 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, TAKEN
);
1642 predict_edge_def (then_edge
, PRED_LOOP_IV_COMPARE_GUESS
, NOT_TAKEN
);
1646 /* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop
exits result from short-circuit conditions that will generate an
1650 if (foo() || global > 10)
1653 This will be translated into:
1658 if foo() goto BB6 else goto BB5
1660 if global > 10 goto BB6 else goto BB7
1664 iftmp = (PHI 0(BB5), 1(BB6))
1665 if iftmp == 1 goto BB8 else goto BB3
1667 outside of the loop...
1669 The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
1670 From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
1671 exits. This function takes BB7->BB8 as input, and finds out the extra loop
1672 exits to predict them using PRED_LOOP_EXTRA_EXIT. */
1675 predict_extra_loop_exits (edge exit_edge
)
1678 bool check_value_one
;
1679 gimple
*lhs_def_stmt
;
1681 tree cmp_rhs
, cmp_lhs
;
1685 last
= last_stmt (exit_edge
->src
);
1688 cmp_stmt
= dyn_cast
<gcond
*> (last
);
1692 cmp_rhs
= gimple_cond_rhs (cmp_stmt
);
1693 cmp_lhs
= gimple_cond_lhs (cmp_stmt
);
1694 if (!TREE_CONSTANT (cmp_rhs
)
1695 || !(integer_zerop (cmp_rhs
) || integer_onep (cmp_rhs
)))
1697 if (TREE_CODE (cmp_lhs
) != SSA_NAME
)
1700 /* If check_value_one is true, only the phi_args with value '1' will lead
1701 to loop exit. Otherwise, only the phi_args with value '0' will lead to
1703 check_value_one
= (((integer_onep (cmp_rhs
))
1704 ^ (gimple_cond_code (cmp_stmt
) == EQ_EXPR
))
1705 ^ ((exit_edge
->flags
& EDGE_TRUE_VALUE
) != 0));
1707 lhs_def_stmt
= SSA_NAME_DEF_STMT (cmp_lhs
);
1711 phi_stmt
= dyn_cast
<gphi
*> (lhs_def_stmt
);
1715 for (i
= 0; i
< gimple_phi_num_args (phi_stmt
); i
++)
1719 tree val
= gimple_phi_arg_def (phi_stmt
, i
);
1720 edge e
= gimple_phi_arg_edge (phi_stmt
, i
);
1722 if (!TREE_CONSTANT (val
) || !(integer_zerop (val
) || integer_onep (val
)))
1724 if ((check_value_one
^ integer_onep (val
)) == 1)
1726 if (EDGE_COUNT (e
->src
->succs
) != 1)
1728 predict_paths_leading_to_edge (e
, PRED_LOOP_EXTRA_EXIT
, NOT_TAKEN
);
1732 FOR_EACH_EDGE (e1
, ei
, e
->src
->preds
)
1733 predict_paths_leading_to_edge (e1
, PRED_LOOP_EXTRA_EXIT
, NOT_TAKEN
);
1738 /* Predict edge probabilities by exploiting loop structure. */
1741 predict_loops (void)
1745 hash_set
<struct loop
*> with_recursion(10);
1747 FOR_EACH_BB_FN (bb
, cfun
)
1749 gimple_stmt_iterator gsi
;
1752 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1753 if (is_gimple_call (gsi_stmt (gsi
))
1754 && (decl
= gimple_call_fndecl (gsi_stmt (gsi
))) != NULL
1755 && recursive_call_p (current_function_decl
, decl
))
1757 loop
= bb
->loop_father
;
1758 while (loop
&& !with_recursion
.add (loop
))
1759 loop
= loop_outer (loop
);
1763 /* Try to predict out blocks in a loop that are not part of a
1765 FOR_EACH_LOOP (loop
, LI_FROM_INNERMOST
)
1767 basic_block bb
, *bbs
;
1768 unsigned j
, n_exits
= 0;
1770 struct tree_niter_desc niter_desc
;
1772 struct nb_iter_bound
*nb_iter
;
1773 enum tree_code loop_bound_code
= ERROR_MARK
;
1774 tree loop_bound_step
= NULL
;
1775 tree loop_bound_var
= NULL
;
1776 tree loop_iv_base
= NULL
;
1778 bool recursion
= with_recursion
.contains (loop
);
1780 exits
= get_loop_exit_edges (loop
);
1781 FOR_EACH_VEC_ELT (exits
, j
, ex
)
1782 if (!(ex
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
| EDGE_FAKE
)))
1790 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1791 fprintf (dump_file
, "Predicting loop %i%s with %i exits.\n",
1792 loop
->num
, recursion
? " (with recursion)":"", n_exits
);
1793 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
1794 && max_loop_iterations_int (loop
) >= 0)
1797 "Loop %d iterates at most %i times.\n", loop
->num
,
1798 (int)max_loop_iterations_int (loop
));
1800 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
1801 && likely_max_loop_iterations_int (loop
) >= 0)
1803 fprintf (dump_file
, "Loop %d likely iterates at most %i times.\n",
1804 loop
->num
, (int)likely_max_loop_iterations_int (loop
));
1807 FOR_EACH_VEC_ELT (exits
, j
, ex
)
1810 HOST_WIDE_INT nitercst
;
1811 int max
= PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS
);
1813 enum br_predictor predictor
;
1816 if (ex
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
| EDGE_FAKE
))
1818 /* Loop heuristics do not expect exit conditional to be inside
1819 inner loop. We predict from innermost to outermost loop. */
1820 if (predicted_by_loop_heuristics_p (ex
->src
))
1822 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1823 fprintf (dump_file
, "Skipping exit %i->%i because "
1824 "it is already predicted.\n",
1825 ex
->src
->index
, ex
->dest
->index
);
1828 predict_extra_loop_exits (ex
);
1830 if (number_of_iterations_exit (loop
, ex
, &niter_desc
, false, false))
1831 niter
= niter_desc
.niter
;
1832 if (!niter
|| TREE_CODE (niter_desc
.niter
) != INTEGER_CST
)
1833 niter
= loop_niter_by_eval (loop
, ex
);
1834 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
1835 && TREE_CODE (niter
) == INTEGER_CST
)
1837 fprintf (dump_file
, "Exit %i->%i %d iterates ",
1838 ex
->src
->index
, ex
->dest
->index
,
1840 print_generic_expr (dump_file
, niter
, TDF_SLIM
);
1841 fprintf (dump_file
, " times.\n");
1844 if (TREE_CODE (niter
) == INTEGER_CST
)
1846 if (tree_fits_uhwi_p (niter
)
1848 && compare_tree_int (niter
, max
- 1) == -1)
1849 nitercst
= tree_to_uhwi (niter
) + 1;
1852 predictor
= PRED_LOOP_ITERATIONS
;
1854 /* If we have just one exit and we can derive some information about
1855 the number of iterations of the loop from the statements inside
1856 the loop, use it to predict this exit. */
1857 else if (n_exits
== 1
1858 && estimated_stmt_executions (loop
, &nit
))
1860 if (wi::gtu_p (nit
, max
))
1863 nitercst
= nit
.to_shwi ();
1864 predictor
= PRED_LOOP_ITERATIONS_GUESSED
;
1866 /* If we have likely upper bound, trust it for very small iteration
1867 counts. Such loops would otherwise get mispredicted by standard
1868 LOOP_EXIT heuristics. */
1869 else if (n_exits
== 1
1870 && likely_max_stmt_executions (loop
, &nit
)
1872 RDIV (REG_BR_PROB_BASE
,
1876 ? PRED_LOOP_EXIT_WITH_RECURSION
1877 : PRED_LOOP_EXIT
].hitrate
)))
1879 nitercst
= nit
.to_shwi ();
1880 predictor
= PRED_LOOP_ITERATIONS_MAX
;
1884 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1885 fprintf (dump_file
, "Nothing known about exit %i->%i.\n",
1886 ex
->src
->index
, ex
->dest
->index
);
1890 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1891 fprintf (dump_file
, "Recording prediction to %i iterations by %s.\n",
1892 (int)nitercst
, predictor_info
[predictor
].name
);
1893 /* If the prediction for number of iterations is zero, do not
1894 predict the exit edges. */
1898 probability
= RDIV (REG_BR_PROB_BASE
, nitercst
);
1899 predict_edge (ex
, predictor
, probability
);
1903 /* Find information about loop bound variables. */
1904 for (nb_iter
= loop
->bounds
; nb_iter
;
1905 nb_iter
= nb_iter
->next
)
1907 && gimple_code (nb_iter
->stmt
) == GIMPLE_COND
)
1909 stmt
= as_a
<gcond
*> (nb_iter
->stmt
);
1912 if (!stmt
&& last_stmt (loop
->header
)
1913 && gimple_code (last_stmt (loop
->header
)) == GIMPLE_COND
)
1914 stmt
= as_a
<gcond
*> (last_stmt (loop
->header
));
1916 is_comparison_with_loop_invariant_p (stmt
, loop
,
1922 bbs
= get_loop_body (loop
);
1924 for (j
= 0; j
< loop
->num_nodes
; j
++)
1931 /* Bypass loop heuristics on continue statement. These
1932 statements construct loops via "non-loop" constructs
1933 in the source language and are better to be handled
1935 if (predicted_by_p (bb
, PRED_CONTINUE
))
1937 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1938 fprintf (dump_file
, "BB %i predicted by continue.\n",
1943 /* If we already used more reliable loop exit predictors, do not
1944 bother with PRED_LOOP_EXIT. */
1945 if (!predicted_by_loop_heuristics_p (bb
))
1947 /* For loop with many exits we don't want to predict all exits
1948 with the pretty large probability, because if all exits are
1949 considered in row, the loop would be predicted to iterate
1950 almost never. The code to divide probability by number of
1951 exits is very rough. It should compute the number of exits
taken in each path through the function (not the overall number
1953 of exits that might be a lot higher for loops with wide switch
1954 statements in them) and compute n-th square root.
1956 We limit the minimal probability by 2% to avoid
1957 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
1958 as this was causing regression in perl benchmark containing such
1961 int probability
= ((REG_BR_PROB_BASE
1964 ? PRED_LOOP_EXIT_WITH_RECURSION
1965 : PRED_LOOP_EXIT
].hitrate
)
1967 if (probability
< HITRATE (2))
1968 probability
= HITRATE (2);
1969 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1970 if (e
->dest
->index
< NUM_FIXED_BLOCKS
1971 || !flow_bb_inside_loop_p (loop
, e
->dest
))
1973 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1975 "Predicting exit %i->%i with prob %i.\n",
1976 e
->src
->index
, e
->dest
->index
, probability
);
1978 recursion
? PRED_LOOP_EXIT_WITH_RECURSION
1979 : PRED_LOOP_EXIT
, probability
);
1983 predict_iv_comparison (loop
, bb
, loop_bound_var
, loop_iv_base
,
1985 tree_to_shwi (loop_bound_step
));
1988 /* In the following code
1993 guess that cond is unlikely. */
1994 if (loop_outer (loop
)->num
)
1996 basic_block bb
= NULL
;
1997 edge preheader_edge
= loop_preheader_edge (loop
);
1999 if (single_pred_p (preheader_edge
->src
)
2000 && single_succ_p (preheader_edge
->src
))
2001 preheader_edge
= single_pred_edge (preheader_edge
->src
);
2003 gimple
*stmt
= last_stmt (preheader_edge
->src
);
2004 /* Pattern match fortran loop preheader:
2005 _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER);
2006 _17 = (logical(kind=4)) _16;
2012 Loop guard branch prediction says nothing about duplicated loop
2013 headers produced by fortran frontend and in this case we want
2014 to predict paths leading to this preheader. */
2017 && gimple_code (stmt
) == GIMPLE_COND
2018 && gimple_cond_code (stmt
) == NE_EXPR
2019 && TREE_CODE (gimple_cond_lhs (stmt
)) == SSA_NAME
2020 && integer_zerop (gimple_cond_rhs (stmt
)))
2022 gimple
*call_stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt
));
2023 if (gimple_code (call_stmt
) == GIMPLE_ASSIGN
2024 && gimple_expr_code (call_stmt
) == NOP_EXPR
2025 && TREE_CODE (gimple_assign_rhs1 (call_stmt
)) == SSA_NAME
)
2026 call_stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt
));
2027 if (gimple_code (call_stmt
) == GIMPLE_CALL
2028 && gimple_call_internal_p (call_stmt
)
2029 && gimple_call_internal_fn (call_stmt
) == IFN_BUILTIN_EXPECT
2030 && TREE_CODE (gimple_call_arg (call_stmt
, 2)) == INTEGER_CST
2031 && tree_fits_uhwi_p (gimple_call_arg (call_stmt
, 2))
2032 && tree_to_uhwi (gimple_call_arg (call_stmt
, 2))
2033 == PRED_FORTRAN_LOOP_PREHEADER
)
2034 bb
= preheader_edge
->src
;
2038 if (!dominated_by_p (CDI_DOMINATORS
,
2039 loop_outer (loop
)->latch
, loop
->header
))
2040 predict_paths_leading_to_edge (loop_preheader_edge (loop
),
2042 ? PRED_LOOP_GUARD_WITH_RECURSION
2049 if (!dominated_by_p (CDI_DOMINATORS
,
2050 loop_outer (loop
)->latch
, bb
))
2051 predict_paths_leading_to (bb
,
2053 ? PRED_LOOP_GUARD_WITH_RECURSION
2060 /* Free basic blocks from get_loop_body. */
2065 /* Attempt to predict probabilities of BB outgoing edges using local
2068 bb_estimate_probability_locally (basic_block bb
)
2070 rtx_insn
*last_insn
= BB_END (bb
);
2073 if (! can_predict_insn_p (last_insn
))
2075 cond
= get_condition (last_insn
, NULL
, false, false);
2079 /* Try "pointer heuristic."
2080 A comparison ptr == 0 is predicted as false.
2081 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2082 if (COMPARISON_P (cond
)
2083 && ((REG_P (XEXP (cond
, 0)) && REG_POINTER (XEXP (cond
, 0)))
2084 || (REG_P (XEXP (cond
, 1)) && REG_POINTER (XEXP (cond
, 1)))))
2086 if (GET_CODE (cond
) == EQ
)
2087 predict_insn_def (last_insn
, PRED_POINTER
, NOT_TAKEN
);
2088 else if (GET_CODE (cond
) == NE
)
2089 predict_insn_def (last_insn
, PRED_POINTER
, TAKEN
);
2093 /* Try "opcode heuristic."
2094 EQ tests are usually false and NE tests are usually true. Also,
2095 most quantities are positive, so we can make the appropriate guesses
2096 about signed comparisons against zero. */
2097 switch (GET_CODE (cond
))
2100 /* Unconditional branch. */
2101 predict_insn_def (last_insn
, PRED_UNCONDITIONAL
,
2102 cond
== const0_rtx
? NOT_TAKEN
: TAKEN
);
/* Floating point comparisons appear to behave in a very
2108 unpredictable way because of special role of = tests in
2110 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
2112 /* Comparisons with 0 are often used for booleans and there is
2113 nothing useful to predict about them. */
2114 else if (XEXP (cond
, 1) == const0_rtx
2115 || XEXP (cond
, 0) == const0_rtx
)
2118 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, NOT_TAKEN
);
/* Floating point comparisons appear to behave in a very
2124 unpredictable way because of special role of = tests in
2126 if (FLOAT_MODE_P (GET_MODE (XEXP (cond
, 0))))
2128 /* Comparisons with 0 are often used for booleans and there is
2129 nothing useful to predict about them. */
2130 else if (XEXP (cond
, 1) == const0_rtx
2131 || XEXP (cond
, 0) == const0_rtx
)
2134 predict_insn_def (last_insn
, PRED_OPCODE_NONEQUAL
, TAKEN
);
2138 predict_insn_def (last_insn
, PRED_FPOPCODE
, TAKEN
);
2142 predict_insn_def (last_insn
, PRED_FPOPCODE
, NOT_TAKEN
);
2147 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
2148 || XEXP (cond
, 1) == constm1_rtx
)
2149 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, NOT_TAKEN
);
2154 if (XEXP (cond
, 1) == const0_rtx
|| XEXP (cond
, 1) == const1_rtx
2155 || XEXP (cond
, 1) == constm1_rtx
)
2156 predict_insn_def (last_insn
, PRED_OPCODE_POSITIVE
, TAKEN
);
2164 /* Set edge->probability for each successor edge of BB. */
2166 guess_outgoing_edge_probabilities (basic_block bb
)
2168 bb_estimate_probability_locally (bb
);
2169 combine_predictions_for_insn (BB_END (bb
), bb
);
static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor);

/* Helper function for expr_expected_value.  */

static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
		       tree op1, bitmap visited, enum br_predictor *predictor)
{
  gimple *def;

  *predictor = PRED_UNCONDITIONAL;

  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CONSTANT (op0))
	return op0;

      if (code == IMAGPART_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME)
	    {
	      def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0));
	      if (is_gimple_call (def)
		  && gimple_call_internal_p (def)
		  && (gimple_call_internal_fn (def)
		      == IFN_ATOMIC_COMPARE_EXCHANGE))
		{
		  /* Assume that any given atomic operation has low contention,
		     and thus the compare-and-swap operation succeeds.  */
		  *predictor = PRED_COMPARE_AND_SWAP;
		  return build_one_cst (TREE_TYPE (op0));
		}
	    }
	}

      if (code != SSA_NAME)
	return NULL_TREE;

      def = SSA_NAME_DEF_STMT (op0);

      /* If we were already here, break the infinite cycle.  */
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
	return NULL;

      if (gimple_code (def) == GIMPLE_PHI)
	{
	  /* All the arguments of the PHI node must have the same constant
	     value.  */
	  int i, n = gimple_phi_num_args (def);
	  tree val = NULL, new_val;

	  for (i = 0; i < n; i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);
	      enum br_predictor predictor2;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited, &predictor2);

	      /* It is difficult to combine value predictors.  Simply assume
		 that the later predictor is weaker and take its prediction.  */
	      if (predictor && *predictor < predictor2)
		*predictor = predictor2;
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}

      if (is_gimple_assign (def))
	{
	  if (gimple_assign_lhs (def) != op0)
	    return NULL;
	  return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
					gimple_assign_rhs1 (def),
					gimple_assign_rhs_code (def),
					gimple_assign_rhs2 (def),
					visited, predictor);
	}

      if (is_gimple_call (def))
	{
	  tree decl = gimple_call_fndecl (def);
	  if (!decl)
	    {
	      if (gimple_call_internal_p (def)
		  && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
		{
		  gcc_assert (gimple_call_num_args (def) == 3);
		  tree val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  tree val2 = gimple_call_arg (def, 2);
		  gcc_assert (TREE_CODE (val2) == INTEGER_CST
			      && tree_fits_uhwi_p (val2)
			      && tree_to_uhwi (val2) < END_PREDICTORS);
		  *predictor = (enum br_predictor) tree_to_uhwi (val2);
		  return gimple_call_arg (def, 1);
		}
	      return NULL;
	    }
	  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_EXPECT:
		{
		  tree val;
		  if (gimple_call_num_args (def) != 2)
		    return NULL;
		  val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  *predictor = PRED_BUILTIN_EXPECT;
		  return gimple_call_arg (def, 1);
		}

	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
		/* Assume that any given atomic operation has low contention,
		   and thus the compare-and-swap operation succeeds.  */
		*predictor = PRED_COMPARE_AND_SWAP;
		return boolean_true_node;

	      default:
		break;
	      }
	}

      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    {
      tree res;
      enum br_predictor predictor2;
      op0 = expr_expected_value (op0, visited, predictor);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (op1, visited, &predictor2);
      if (predictor && *predictor < predictor2)
	*predictor = predictor2;
      if (!op1)
	return NULL;
      res = fold_build2 (code, type, op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited, predictor);
      if (!op0)
	return NULL;
      res = fold_build1 (code, type, op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }

  return NULL;
}
/* Return the constant EXPR is likely to have at execution time, NULL
   if unknown.  The function is used by the builtin_expect branch predictor
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guesses (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited,
		     enum br_predictor *predictor)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    {
      *predictor = PRED_UNCONDITIONAL;
      return expr;
    }

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
				op0, code, op1, visited, predictor);
}
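
/* Illustration (hypothetical user code, not taken from the sources above):
   for a condition such as

     if (__builtin_expect (flag, 1))
       ...

   the GIMPLE_COND operands lead here through the SSA definition of the
   __builtin_expect result; the builtin handling in expr_expected_value_1
   returns the expected value (1) and sets *predictor to PRED_BUILTIN_EXPECT,
   which tree_predict_by_opcode below turns into a "branch taken"
   prediction.  */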
/* Predict using opcode of the last statement in basic block.  */

static void
tree_predict_by_opcode (basic_block bb)
{
  gimple *stmt = last_stmt (bb);
  edge then_edge;
  tree op0, op1;
  tree type;
  tree val;
  enum tree_code cmp;
  bitmap visited;
  edge_iterator ei;
  enum br_predictor predictor;

  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  cmp = gimple_cond_code (stmt);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
			       &predictor);
  BITMAP_FREE (visited);
  if (val && TREE_CODE (val) == INTEGER_CST)
    {
      if (predictor == PRED_BUILTIN_EXPECT)
	{
	  int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY);

	  gcc_assert (percent >= 0 && percent <= 100);
	  if (integer_zerop (val))
	    percent = 100 - percent;
	  predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent));
	}
      else
	predict_edge_def (then_edge, predictor,
			  integer_zerop (val) ? NOT_TAKEN : TAKEN);
    }
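
  /* For example (hypothetical parameter setting): with
     --param builtin-expect-probability=90, an expected value of 1 predicts
     the THEN edge with a 90% hit rate, while an expected value of 0 flips
     the computation above to 100 - 90 = 10%.  */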
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (cmp == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (cmp == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else
    /* Try "opcode heuristic."
       EQ tests are usually false and NE tests are usually true.  Also,
       most quantities are positive, so we can make the appropriate guesses
       about signed comparisons against zero.  */
    switch (cmp)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0) || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (op1))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (op1)
	    || integer_onep (op1)
	    || integer_all_onesp (op1)
	    || real_minus_onep (op1))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
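
/* A short illustration of the heuristics above (example code only, not part
   of the compiler sources): for "if (ptr == NULL)" the pointer heuristic
   predicts the THEN edge as not taken, while for "if (count > 0)" the opcode
   heuristic predicts the THEN edge as taken, because most quantities compared
   against zero are positive.  */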
/* Try to guess whether the return value means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with the return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */

static void
apply_return_prediction (void)
{
  greturn *return_stmt = NULL;
  tree return_val;
  edge e;
  gphi *phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      gimple *last = last_stmt (e->src);
      if (last
	  && gimple_code (last) == GIMPLE_RETURN)
	{
	  return_stmt = as_a <greturn *> (last);
	  break;
	}
    }
  if (!e)
    return;
  return_val = gimple_return_retval (return_stmt);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
    return;
  phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
  phi_num_args = gimple_phi_num_args (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
					 direction);
      }
}
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  bool has_return_edges = false;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
      {
	has_return_edges = true;
	break;
      }

  apply_return_prediction ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      if (gimple_call_noreturn_p (stmt) && has_return_edges)
		predict_paths_leading_to (bb, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	      if (decl && recursive_call_p (current_function_decl, decl))
		predict_paths_leading_to (bb, PRED_RECURSIVE_CALL,
					  NOT_TAKEN);
	    }
	  else if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
					gimple_predict_outcome (stmt));
	      /* Keep GIMPLE_PREDICT around so early inlining will propagate
		 hints to callers.  */
	    }
	}
    }
}
/* Callback for hash_map::traverse, asserts that the pointer map is
   empty.  */

bool
assert_is_empty (const_basic_block const &, edge_prediction *const &value,
		 void *)
{
  gcc_assert (!value);
  return false;
}
/* Predict branch probabilities and estimate profile for basic block BB.  */

static void
tree_estimate_probability_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple *last;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* Predict edges to user labels with attributes.  */
      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  gimple_stmt_iterator gi;
	  for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
	      tree decl;

	      if (!label_stmt)
		break;
	      decl = gimple_label_label (label_stmt);
	      if (DECL_ARTIFICIAL (decl))
		continue;

	      /* Finally, we have a user-defined label.  */
	      if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
		predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
	      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
		predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
	    }
	}

      /* Predict early returns to be probable, as we've already taken
	 care of error returns and other cases are often used for
	 fast paths through the function.

	 Since we've already removed the return statements, we are
	 looking for predecessors that branch to a block whose single
	 successor is the exit block and whose last statement is a
	 return.  */
      if (e->dest != bb->next_bb
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && single_succ_p (e->dest)
	  && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && (last = last_stmt (e->dest)) != NULL
	  && gimple_code (last) == GIMPLE_RETURN)
	{
	  edge e1;
	  edge_iterator ei1;

	  if (single_succ_p (bb))
	    {
	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }
	  else if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		   && !predicted_by_p (e->src, PRED_CONST_RETURN)
		   && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
	    predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	}

      /* Look for the block we are guarding (ie we dominate it,
	 but it doesn't postdominate us).  */
      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
	  && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	  && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	{
	  gimple_stmt_iterator bi;

	  /* The call heuristic claims that a guarded function call
	     is improbable.  This is because such calls are often used
	     to signal exceptional situations such as printing error
	     messages.  */
	  for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
	       gsi_next (&bi))
	    {
	      gimple *stmt = gsi_stmt (bi);
	      if (is_gimple_call (stmt)
		  && !gimple_inexpensive_call_p (as_a <gcall *> (stmt))
		  /* Constant and pure calls are hardly used to signal
		     something exceptional.  */
		  && gimple_has_side_effects (stmt))
		{
		  predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		  break;
		}
	    }
	}
    }
  tree_predict_by_opcode (bb);
}
/* Predict branch probabilities and estimate profile of the tree CFG.
   This function can be called from the loop optimizers to recompute
   the profile information.
   If DRY_RUN is set, do not modify CFG and only produce dump files.  */

void
tree_estimate_probability (bool dry_run)
{
  basic_block bb;

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
  create_preheaders (CP_SIMPLE_PREHEADERS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bb_predictions = new hash_map <const_basic_block, edge_prediction *>;
  tree_bb_level_predictions ();
  record_loop_exits ();

  if (number_of_loops (cfun) > 1)
    predict_loops ();

  FOR_EACH_BB_FN (bb, cfun)
    tree_estimate_probability_bb (bb);

  FOR_EACH_BB_FN (bb, cfun)
    combine_predictions_for_bb (bb, dry_run);

  bb_predictions->traverse <void *, assert_is_empty> (NULL);

  delete bb_predictions;
  bb_predictions = NULL;

  if (!dry_run)
    estimate_bb_frequencies (false);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
}
/* Predict edges to successors of CUR whose sources are not postdominated by
   BB by PRED and recurse to all postdominators.  */

static void
predict_paths_for_bb (basic_block cur, basic_block bb,
		      enum br_predictor pred,
		      enum prediction taken,
		      bitmap visited, struct loop *in_loop = NULL)
{
  edge e;
  edge_iterator ei;
  basic_block son;

  /* If we exited the loop or CUR is unconditional in the loop, there is
     nothing to do.  */
  if (in_loop
      && (!flow_bb_inside_loop_p (in_loop, cur)
	  || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur)))
    return;

  /* We are looking for all edges forming the edge cut induced by the
     set of all blocks postdominated by BB.  */
  FOR_EACH_EDGE (e, ei, cur->preds)
    if (e->src->index >= NUM_FIXED_BLOCKS
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
      {
	edge e2;
	edge_iterator ei2;
	bool found = false;

	/* Ignore fake edges and eh, we predict them as not taken anyway.  */
	if (e->flags & (EDGE_EH | EDGE_FAKE))
	  continue;
	gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));

	/* See if there is an edge from e->src that is not abnormal
	   and does not lead to BB and does not exit the loop.  */
	FOR_EACH_EDGE (e2, ei2, e->src->succs)
	  if (e2 != e
	      && !(e2->flags & (EDGE_EH | EDGE_FAKE))
	      && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)
	      && (!in_loop || !loop_exit_edge_p (in_loop, e2)))
	    {
	      found = true;
	      break;
	    }

	/* If there is a non-abnormal path leaving e->src, predict the edge
	   using the predictor.  Otherwise we need to look for paths
	   leading to e->src.

	   The second may lead to an infinite loop in the case we are
	   predicting regions that are only reachable by abnormal edges.
	   We simply prevent visiting a given BB twice.  */
	if (found)
	  {
	    if (!edge_predicted_by_p (e, pred, taken))
	      predict_edge_def (e, pred, taken);
	  }
	else if (bitmap_set_bit (visited, e->src->index))
	  predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop);
      }
  for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    predict_paths_for_bb (son, bb, pred, taken, visited, in_loop);
}
/* Sets branch probabilities according to PREDiction and TAKEN.  */

static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
			  enum prediction taken, struct loop *in_loop)
{
  bitmap visited = BITMAP_ALLOC (NULL);
  predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
  BITMAP_FREE (visited);
}
/* Like predict_paths_leading_to but take edge instead of basic block.  */

static void
predict_paths_leading_to_edge (edge e, enum br_predictor pred,
			       enum prediction taken, struct loop *in_loop)
{
  bool has_nonloop_edge = false;
  edge_iterator ei;
  edge e2;

  basic_block bb = e->src;
  FOR_EACH_EDGE (e2, ei, bb->succs)
    if (e2->dest != e->src && e2->dest != e->dest
	&& !(e->flags & (EDGE_EH | EDGE_FAKE))
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
      {
	has_nonloop_edge = true;
	break;
      }
  if (!has_nonloop_edge)
    {
      bitmap visited = BITMAP_ALLOC (NULL);
      predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
      BITMAP_FREE (visited);
    }
  else
    predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct block_info
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep the queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
};

/* Similar information for edges.  */
struct edge_prob_info
{
  /* In case the edge is a loopback edge, the probability that the edge will
     be reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
};

#define BLOCK_INFO(B)	((block_info *) (B)->aux)
#define EDGE_INFO(E)	((edge_prob_info *) (E)->aux)
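
/* For instance, following the formula in the comment above, a back_edge_prob
   of 0.9 corresponds to an estimated 1 / (1 - 0.9) = 10 iterations of the
   loop per entry.  */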
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
      /* When the function never returns, we will never process the exit
	 block.  */
      if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	bb->count = bb->frequency = 0;
    }

  BLOCK_INFO (head)->frequency = 1;
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability = 0;
      sreal frequency = 0;

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      cyclic_probability += EDGE_INFO (e)->back_edge_prob;
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency
				 / REG_BR_PROB_BASE);  */

		sreal tmp = e->probability;
		tmp *= BLOCK_INFO (e->src)->frequency;
		tmp *= real_inv_br_prob_base;
		frequency += tmp;
	      }

	  if (cyclic_probability == 0)
	    BLOCK_INFO (bb)->frequency = frequency;
	  else
	    {
	      if (cyclic_probability > real_almost_one)
		cyclic_probability = real_almost_one;

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability)  */

	      cyclic_probability = sreal (1) - cyclic_probability;
	      BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
	    }
	}
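
      /* For example, if the acyclic incoming frequency is 1 and the back
	 edges contribute a cyclic_probability of 0.75, the block gets
	 frequency 1 / (1 - 0.75) = 4, i.e. it is expected to execute about
	 four times per entry into the region.  */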
      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal tmp = e->probability;
	  tmp *= BLOCK_INFO (bb)->frequency;
	  EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
/* Estimate frequencies in loops at the same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}
/* Propagates frequencies through the structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops (cfun) > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB_FN (bb, cfun)
    bitmap_set_bit (tovisit, bb->index);
  propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
  BITMAP_FREE (tovisit);
}
/* Drop the profile for NODE to guessed, and update its frequency based on
   whether it is expected to be hot given the CALL_COUNT.  */

static void
drop_profile (struct cgraph_node *node, gcov_type call_count)
{
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
  /* In the case where this was called by another function with a
     dropped profile, call_count will be 0.  Since there are no
     non-zero call counts to this function, we don't know for sure
     whether it is hot, and therefore it will be marked normal below.  */
  bool hot = maybe_hot_count_p (NULL, call_count);

  if (dump_file)
    fprintf (dump_file,
	     "Dropping 0 profile for %s/%i. %s based on calls.\n",
	     node->name (), node->order,
	     hot ? "Function is hot" : "Function is normal");
  /* We only expect to miss profiles for functions that are reached
     via non-zero call edges in cases where the function may have
     been linked from another module or library (COMDATs and extern
     templates).  See the comments below for handle_missing_profiles.
     Also, only warn in cases where the missing counts exceed the
     number of training runs.  In certain cases with an execv followed
     by a no-return call the profile for the no-return call is not
     dumped and there can be a mismatch.  */
  if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
      && call_count > profile_info->runs)
    {
      if (flag_profile_correction)
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "Missing counts for called function %s/%i\n",
		     node->name (), node->order);
	}
      else
	warning (0, "Missing counts for called function %s/%i",
		 node->name (), node->order);
    }

  profile_status_for_fn (fn)
    = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
  node->frequency
    = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
}
/* In the case of COMDAT routines, multiple object files will contain the same
   function and the linker will select one for the binary.  In that case
   all the other copies from the profile instrument binary will be missing
   profile counts.  Look for cases where this happened, due to non-zero
   call counts going to 0-count functions, and drop the profile to guessed
   so that we can use the estimated probabilities and avoid optimizing only
   for size.

   The other case where the profile may be missing is when the routine
   is not going to be emitted to the object file, e.g. for "extern template"
   class methods.  Those will be marked DECL_EXTERNAL.  Emit a warning in
   all other cases of non-zero calls to 0-count functions.  */

void
handle_missing_profiles (void)
{
  struct cgraph_node *node;
  int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
  auto_vec<struct cgraph_node *, 64> worklist;

  /* See if a 0-count function has non-0 count callers.  In this case we
     lost some profile.  Drop its function profile to PROFILE_GUESSED.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct cgraph_edge *e;
      gcov_type call_count = 0;
      gcov_type max_tp_first_run = 0;
      struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

      if (node->count)
	continue;
      for (e = node->callers; e; e = e->next_caller)
	{
	  call_count += e->count;

	  if (e->caller->tp_first_run > max_tp_first_run)
	    max_tp_first_run = e->caller->tp_first_run;
	}

      /* If the time profile is missing, let's assign the maximum that comes
	 from caller functions.  */
      if (!node->tp_first_run && max_tp_first_run)
	node->tp_first_run = max_tp_first_run + 1;

      if (call_count
	  && fn && fn->cfg
	  && (call_count * unlikely_count_fraction >= profile_info->runs))
	{
	  drop_profile (node, call_count);
	  worklist.safe_push (node);
	}
    }
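
  /* A hypothetical example of the check above: with profile_info->runs == 100
     and UNLIKELY_BB_COUNT_FRACTION set to 20, a 0-count function whose
     callers' counts sum to at least 5 has its profile dropped to guessed,
     since 5 * 20 >= 100.  */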
  /* Propagate the profile dropping to other 0-count COMDATs that are
     potentially called by COMDATs we already dropped the profile on.  */
  while (worklist.length () > 0)
    {
      struct cgraph_edge *e;

      node = worklist.pop ();
      for (e = node->callees; e; e = e->next_callee)
	{
	  struct cgraph_node *callee = e->callee;
	  struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);

	  if (callee->count > 0)
	    continue;
	  if (DECL_COMDAT (callee->decl) && fn && fn->cfg
	      && profile_status_for_fn (fn) == PROFILE_READ)
	    {
	      drop_profile (node, 0);
	      worklist.safe_push (callee);
	    }
	}
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

bool
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  /* Don't overwrite the estimated frequencies when the profile for
     the function is missing.  We may drop this function PROFILE_GUESSED
     later in drop_profile ().  */
  if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    return false;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
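
  /* For instance, taking BB_FREQ_MAX to be 10000 for illustration: with a
     hottest-block count of 1000, a block executed 250 times gets frequency
     (250 * 10000 + 500) / 1000 = 2500, i.e. a quarter of the maximum.  */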

  return true_count_max;
}
/* Return true if the function is likely to be expensive, so there is no point
   to optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit of number of instructions
   the function can execute on average and still be considered not
   expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute this accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
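
  /* Example (hypothetical numbers): with an entry block frequency of 1000
     and a THRESHOLD of 50, LIMIT becomes 50000; the loop below then declares
     the function expensive once the summed frequencies of its active insns
     exceed that limit.  */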
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      FOR_BB_INSNS (bb, insn)
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate and propagate basic block frequencies using the given branch
   probabilities.  If FORCE is true, the frequencies are used to estimate
   the counts even when there are already non-zero profile counts.  */

void
estimate_bb_frequencies (bool force)
{
  basic_block bb;
  sreal freq_max;

  if (force || profile_status_for_fn (cfun) != PROFILE_READ
      || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  real_br_prob_base = REG_BR_PROB_BASE;
	  real_bb_freq_max = BB_FREQ_MAX;
	  real_one_half = sreal (1, -1);
	  real_inv_br_prob_base = sreal (1) / real_br_prob_base;
	  real_almost_one = sreal (1) - real_inv_br_prob_base;
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
	 REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (block_info));
      alloc_aux_for_edges (sizeof (edge_prob_info));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      EDGE_INFO (e)->back_edge_prob = e->probability;
	      EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
	    }
	}

      /* First compute frequencies locally for each loop from innermost
	 to outermost to examine frequencies for back edges.  */
      estimate_loops ();

      freq_max = 0;
      FOR_EACH_BB_FN (bb, cfun)
	if (freq_max < BLOCK_INFO (bb)->frequency)
	  freq_max = BLOCK_INFO (bb)->frequency;

      freq_max = real_bb_freq_max / freq_max;
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
	{
	  sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
	  bb->frequency = tmp.to_int ();
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
/* Decide whether the function is hot, cold or unlikely executed.  */

static void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (profile_status_for_fn (cfun) != PROFILE_READ)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
	       || DECL_STATIC_DESTRUCTOR (current_function_decl))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }

  /* Only the first time try to drop the function into unlikely executed.
     After inlining the roundoff errors may confuse us.
     The ipa-profile pass will drop functions only called from unlikely
     functions to unlikely, and that is most of what we care about.  */
  if (!cfun->after_inlining)
    node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (maybe_hot_bb_p (cfun, bb))
	{
	  node->frequency = NODE_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (cfun, bb))
	node->frequency = NODE_FREQUENCY_NORMAL;
    }
}
/* Build PREDICT_EXPR.  */

tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
		   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
/* Predict branch probabilities and estimate profile of the tree CFG.  */

namespace {

const pass_data pass_data_profile =
{
  GIMPLE_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_profile : public gimple_opt_pass
{
public:
  pass_profile (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_profile, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_guess_branch_prob; }
  virtual unsigned int execute (function *);

}; // class pass_profile

unsigned int
pass_profile::execute (function *fun)
{
  unsigned nb_loops;

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
    return 0;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (fun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability (false);

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    profile_status_for_fn (fun) = PROFILE_GUESSED;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
	if (loop->header->frequency)
	  fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
		   loop->num,
		   (int)expected_loop_iterations_unbounded (loop));
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_profile (gcc::context *ctxt)
{
  return new pass_profile (ctxt);
}
namespace {

const pass_data pass_data_strip_predict_hints =
{
  GIMPLE_PASS, /* type */
  "*strip_predict_hints", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strip_predict_hints : public gimple_opt_pass
{
public:
  pass_strip_predict_hints (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_strip_predict_hints

/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */

unsigned int
pass_strip_predict_hints::execute (function *fun)
{
  basic_block bb;
  gimple *ass_stmt;
  tree var;
  bool changed = false;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
	{
	  gimple *stmt = gsi_stmt (bi);

	  if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      gsi_remove (&bi, true);
	      changed = true;
	      continue;
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      if ((fndecl
		   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
		   && gimple_call_num_args (stmt) == 2)
		  || (gimple_call_internal_p (stmt)
		      && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
		{
		  var = gimple_call_lhs (stmt);
		  changed = true;
		  if (var)
		    {
		      ass_stmt
			= gimple_build_assign (var, gimple_call_arg (stmt, 0));
		      gsi_replace (&bi, ass_stmt, true);
		    }
		  else
		    {
		      gsi_remove (&bi, true);
		      continue;
		    }
		}
	    }
	  gsi_next (&bi);
	}
    }
  return changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strip_predict_hints (gcc::context *ctxt)
{
  return new pass_strip_predict_hints (ctxt);
}
/* Rebuild function frequencies.  Passes are in general expected to
   maintain profile by hand, however in some cases this is not possible:
   for example when inlining several functions with loops, frequencies might
   run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);

  /* When the max bb count in the function is small, there is a higher
     chance that there were truncation errors in the integer scaling
     of counts by inlining and other optimizations.  This could lead
     to incorrect classification of code as being cold when it isn't.
     In that case, force the estimation of bb counts/frequencies from the
     branch probabilities, rather than computing frequencies from counts,
     which may also lead to frequencies incorrectly reduced to 0.  There
     is less precision in the probabilities, so we only do this for small
     max counts.  */
  gcov_type count_max = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    count_max = MAX (bb->count, count_max);

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED
      || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
	  && count_max < REG_BR_PROB_BASE / 10))
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies (true);
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status_for_fn (cfun) == PROFILE_READ)
    counts_to_freqs ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}
/* Perform a dry run of the branch prediction pass and report comparison of
   the predicted and real profile into the dump file.  */

void
report_predictor_hitrates (void)
{
  unsigned nb_loops;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (cfun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability (true);

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
}
/* Force edge E to be cold.
   If IMPOSSIBLE is true, force the edge to have count and probability 0;
   otherwise keep a low probability to represent a possible error in a guess.
   This is used e.g. in case we predict a loop to likely iterate a given
   number of times but we are not 100% sure.

   This function locally updates the profile without an attempt to keep global
   consistency, which cannot be reached in full generality without a full
   profile rebuild from probabilities alone.  Doing so is not necessarily a
   good idea because frequencies and counts may be more realistic than
   probabilities.

   In some cases (such as for elimination of early exits during full loop
   unrolling) the caller can ensure that the profile will get consistent
   afterwards.  */

void
force_edge_cold (edge e, bool impossible)
{
  gcov_type count_sum = 0;
  int prob_sum = 0;
  edge_iterator ei;
  edge e2;
  gcov_type old_count = e->count;
  int old_probability = e->probability;
  gcov_type gcov_scale = REG_BR_PROB_BASE;
  int prob_scale = REG_BR_PROB_BASE;

  /* If the edge is already improbable or cold, just return.  */
  if (e->probability <= (impossible ? PROB_VERY_UNLIKELY : 0)
      && (!impossible || !e->count))
    return;
  FOR_EACH_EDGE (e2, ei, e->src->succs)
    if (e2 != e)
      {
	count_sum += e2->count;
	prob_sum += e2->probability;
      }

  /* If there are other edges out of e->src, redistribute probability
     there.  */
  if (prob_sum)
    {
      e->probability
	= MIN (e->probability, impossible ? 0 : PROB_VERY_UNLIKELY);
      if (old_probability)
	e->count = RDIV (e->count * e->probability, old_probability);
      else
	e->count = MIN (e->count, impossible ? 0 : 1);

      if (count_sum)
	gcov_scale = RDIV ((count_sum + old_count - e->count) * REG_BR_PROB_BASE,
			   count_sum);
      prob_scale = RDIV ((REG_BR_PROB_BASE - e->probability) * REG_BR_PROB_BASE,
			 prob_sum);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Making edge %i->%i %s by redistributing "
		 "probability to other edges.\n",
		 e->src->index, e->dest->index,
		 impossible ? "impossible" : "cold");
      FOR_EACH_EDGE (e2, ei, e->src->succs)
	if (e2 != e)
	  {
	    e2->count = RDIV (e2->count * gcov_scale, REG_BR_PROB_BASE);
	    e2->probability = RDIV (e2->probability * prob_scale,
				    REG_BR_PROB_BASE);
	  }
    }
  /* If all edges out of e->src are unlikely, the basic block itself
     is unlikely.  */
  else
    {
      e->probability = REG_BR_PROB_BASE;

      /* If we did no adjusting, the source basic block has no likely edges
	 leaving the other direction.  In that case force that bb cold, too.
	 This in general is a difficult task to do, but handle the special
	 case when BB has only one predecessor.  This is a common case when
	 we are updating the profile after loop transforms.  */
      if (!prob_sum && !count_sum && single_pred_p (e->src)
	  && e->src->frequency > (impossible ? 0 : 1))
	{
	  int old_frequency = e->src->frequency;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
		     impossible ? "impossible" : "cold");
	  e->src->frequency = MIN (e->src->frequency, impossible ? 0 : 1);
	  e->src->count = e->count = RDIV (e->src->count * e->src->frequency,
					   old_frequency);
	  force_edge_cold (single_pred_edge (e->src), impossible);
	}
      else if (dump_file && (dump_flags & TDF_DETAILS)
	       && maybe_hot_bb_p (cfun, e->src))
	fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
		 impossible ? "impossible" : "cold");
    }
}