/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
#include "pointer-set.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.
   PROB_VERY_UNLIKELY should be small enough so a basic block predicted
   by it gets below HOT_BB_FREQUENCY_FRACTION.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 2000 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
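/* For intuition (assuming the usual REG_BR_PROB_BASE of 10000):
   PROB_VERY_UNLIKELY works out to 4 (about 0.04%), PROB_EVEN to 5000
   (50%), PROB_VERY_LIKELY to 9996 and PROB_ALWAYS to 10000, i.e.
   probabilities are fixed-point fractions of REG_BR_PROB_BASE.  */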
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction);
static bool can_predict_insn_p (const_rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
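/* For example, assuming REG_BR_PROB_BASE is 10000, HITRATE (50) yields
   5000 and HITRATE (99) yields 9900, i.e. a percentage is rescaled into
   the REG_BR_PROB_BASE fixed-point representation with rounding.  */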
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (int freq)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  if (!profile_info || !flag_branch_probabilities)
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR->frequency * 2 / 3))
    return false;
  if (freq < ENTRY_BLOCK_PTR->frequency
	      / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_count_p (gcov_type count)
{
  if (profile_status != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count
	  > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
}
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (const_basic_block bb)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (bb->count);
  return maybe_hot_frequency_p (bb->frequency);
}
/* Return true if the call can be hot.  */

bool
cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
{
  if (profile_info && flag_branch_probabilities
      && (edge->count
	  <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
      || edge->callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return false;
  if (edge->caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
      && edge->callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE)
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_HOT)
    return true;
  if (edge->caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && edge->frequency < CGRAPH_FREQ_BASE * 3 / 2)
    return false;
  if (flag_guess_branch_prob
      && edge->frequency <= (CGRAPH_FREQ_BASE
			     / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}
/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (e->count);
  return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && (cgraph_get_node (current_function_decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
/* Return true if NODE should be optimized for size.  */

bool
cgraph_optimize_for_size_p (struct cgraph_node *node)
{
  if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}
/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return false;
  return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
}
/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}
/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
}
/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}
/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}
/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}
/* Return TRUE when the current instruction should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}
/* Return TRUE when the current instruction should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}
/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}
/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}
/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}
/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}
/* Return true when edge E is likely to be well predictable by branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
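/* For instance, if PARAM_PREDICTABLE_BRANCH_OUTCOME is 2 (percent), an
   edge counts as well predictable when its guessed probability lies
   within 2% of either 0 or REG_BR_PROB_BASE.  */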
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (bb);
}
/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}
/* Set RTL expansion to default mode (i.e. when profile info is not known).  */

void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static struct pointer_map_t *bb_predictions;

/* Structure representing predictions in tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  void **preds = pointer_map_contains (bb_predictions, bb);

  if (!preds)
    return false;

  for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken); it is predicted right slightly over 75% of the time.
   It is, however, notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This
   predicate should be able to distinguish those, but at the moment it simply
   checks for the noreturn heuristic, which is the only one giving probability
   over 99% or below 1%.  In the future we might want to propagate reliability
   information across the CFG if we find it useful in multiple places.  */

static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}
/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      void **preds = pointer_map_insert (bb_predictions, e->src);

      i->ep_next = (struct edge_prediction *) *preds;
      *preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  void **preds;

  if (!bb_predictions)
    return;

  preds = pointer_map_contains (bb_predictions, e->src);

  if (preds)
    {
      struct edge_prediction **prediction = (struct edge_prediction **) preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  void **preds = pointer_map_contains (bb_predictions, bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = (struct edge_prediction *) *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (const_rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
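/* E.g. a block ending in a 3-way branch with no EH or fake edges gets
   roughly REG_BR_PROB_BASE / 3 on each outgoing edge; the "+ nedges / 2"
   term rounds to nearest instead of truncating.  */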
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_reg_note (insn, REG_BR_PROB, GEN_INT (combined_probability));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  void **preds;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  preds = pointer_map_contains (bb_predictions, bb);
  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
	 by predictor with smallest index.  */
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != first)
	    probability = REG_BR_PROB_BASE - probability;

	  found = true;
	  /* First match heuristics would be wildly confused if we predicted
	     both directions.  */
	  if (best_predictor > predictor)
	    {
	      struct edge_prediction *pred2;
	      int prob = probability;

	      for (pred2 = (struct edge_prediction *) *preds; pred2;
		   pred2 = pred2->ep_next)
		if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
		  {
		    int probability2 = pred2->ep_probability;

		    if (pred2->ep_edge != first)
		      probability2 = REG_BR_PROB_BASE - probability2;

		    if ((probability < REG_BR_PROB_BASE / 2) !=
			(probability2 < REG_BR_PROB_BASE / 2))
		      break;

		    /* If the same predictor later gave better result, go for it! */
		    if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
			|| (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
		      prob = probability2;
		  }
	      if (!pred2)
		best_probability = prob, best_predictor = predictor;
	    }

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32bit integers.  */
	  if (d == 0)
	    /* If one probability is 0% and one 100%, avoid division by zero.  */
	    combined_probability = REG_BR_PROB_BASE / 2;
	  else
	    combined_probability = (((double) combined_probability)
				    * probability
				    * REG_BR_PROB_BASE / d + 0.5);
	}
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
	{
	  enum br_predictor predictor = pred->ep_predictor;
	  int probability = pred->ep_probability;

	  if (pred->ep_edge != EDGE_SUCC (bb, 0))
	    probability = REG_BR_PROB_BASE - probability;
	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	}
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition is satisfied, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (host_integerp (t1, 0))
    value = tree_low_cst (t1, 0);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (host_integerp (t2, 0))
    value = tree_low_cst (t2, 0);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}
/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}
/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     int *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  int step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (host_integerp (iv1.step, 0))
	step = tree_low_cst (iv1.step, 0);
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (host_integerp (iv0.step, 0))
	step = tree_low_cst (iv0.step, 0);
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}
1105 expr_coherent_p (tree t1
, tree t2
)
1108 tree ssa_name_1
= NULL
;
1109 tree ssa_name_2
= NULL
;
1111 gcc_assert (TREE_CODE (t1
) == SSA_NAME
|| TREE_CODE (t1
) == INTEGER_CST
);
1112 gcc_assert (TREE_CODE (t2
) == SSA_NAME
|| TREE_CODE (t2
) == INTEGER_CST
);
1117 if (TREE_CODE (t1
) == INTEGER_CST
&& TREE_CODE (t2
) == INTEGER_CST
)
1119 if (TREE_CODE (t1
) == INTEGER_CST
|| TREE_CODE (t2
) == INTEGER_CST
)
1122 /* Check to see if t1 is expressed/defined with t2. */
1123 stmt
= SSA_NAME_DEF_STMT (t1
);
1124 gcc_assert (stmt
!= NULL
);
1125 if (is_gimple_assign (stmt
))
1127 ssa_name_1
= SINGLE_SSA_TREE_OPERAND (stmt
, SSA_OP_USE
);
1128 if (ssa_name_1
&& ssa_name_1
== t2
)
1132 /* Check to see if t2 is expressed/defined with t1. */
1133 stmt
= SSA_NAME_DEF_STMT (t2
);
1134 gcc_assert (stmt
!= NULL
);
1135 if (is_gimple_assign (stmt
))
1137 ssa_name_2
= SINGLE_SSA_TREE_OPERAND (stmt
, SSA_OP_USE
);
1138 if (ssa_name_2
&& ssa_name_2
== t1
)
1142 /* Compare if t1 and t2's def_stmts are identical. */
1143 if (ssa_name_2
!= NULL
&& ssa_name_1
== ssa_name_2
)
/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

   In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (struct loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  int compare_step;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
      || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
      || predicted_by_p (bb, PRED_LOOP_EXIT))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
					    &compare_code,
					    &compare_step,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (host_integerp (loop_bound_var, 0)
      && host_integerp (compare_var, 0)
      && host_integerp (compare_base, 0))
    {
      int probability;
      HOST_WIDE_INT compare_count;
      HOST_WIDE_INT loop_bound = tree_low_cst (loop_bound_var, 0);
      HOST_WIDE_INT compare_bound = tree_low_cst (compare_var, 0);
      HOST_WIDE_INT base = tree_low_cst (compare_base, 0);
      HOST_WIDE_INT loop_count = (loop_bound - base) / compare_step;

      if ((compare_step > 0)
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	compare_count = (loop_bound - compare_bound) / compare_step;
      else
	compare_count = (compare_bound - base) / compare_step;

      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	compare_count++;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	loop_count++;
      if (compare_count < 0)
	compare_count = 0;
      if (loop_count < 0)
	loop_count = 0;

      if (loop_count == 0)
	probability = 0;
      else if (compare_count > loop_count)
	probability = REG_BR_PROB_BASE;
      else
	probability = (double) REG_BR_PROB_BASE * compare_count / loop_count;
      predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
      return;
    }
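  /* For instance, in a loop like
	for (i = 0; i < 100; i++)
	  if (i < 30) ...
     the code above computes loop_count = 100 and compare_count = 30, so
     the inner branch is predicted taken with a probability of roughly 30%
     (3000 with a REG_BR_PROB_BASE of 10000).  */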
  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	     * step < 0 : backedge condition is like (i > bound)
	     * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 the branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}
/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;
      struct nb_iter_bound *nb_iter;
      enum tree_code loop_bound_code = ERROR_MARK;
      int loop_bound_step = 0;
      tree loop_bound_var = NULL;
      tree loop_iv_base = NULL;
      gimple stmt = NULL;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      FOR_EACH_VEC_ELT (edge, exits, j, ex)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (host_integerp (niter, 1)
		  && compare_tree_int (niter, max-1) == -1)
		nitercst = tree_low_cst (niter, 1) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_stmt_executions_int (loop);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
	  predict_edge (ex, predictor, probability);
	}
      VEC_free (edge, heap, exits);

      /* Find information about loop bound variables.  */
      for (nb_iter = loop->bounds; nb_iter;
	   nb_iter = nb_iter->next)
	if (nb_iter->stmt
	    && gimple_code (nb_iter->stmt) == GIMPLE_COND)
	  {
	    stmt = nb_iter->stmt;
	    break;
	  }
      if (!stmt && last_stmt (loop->header)
	  && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
	stmt = last_stmt (loop->header);
      if (stmt)
	is_comparison_with_loop_invariant_p (stmt, loop,
					     &loop_bound_var,
					     &loop_bound_code,
					     &loop_bound_step,
					     &loop_iv_base);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict as not taken an edge exiting
	     the loop if the conditional has no loop header successors.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For loops with many exits we don't want to predict all exits
		 with a pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by the number
		 of exits is very rough.  It should compute the number of
		 exits taken in each path through the function (not the
		 overall number of exits, which might be a lot higher for
		 loops with wide switch statements in them) and compute the
		 n-th square root.

		 We limit the minimal probability to 2% to avoid
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
		 as this was causing a regression in the perl benchmark
		 containing such a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
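	  /* As an illustration: if PRED_LOOP_EXIT's hitrate were 90% and
	     the loop had three exits, each exit edge would get roughly
	     (REG_BR_PROB_BASE - 9000) / 3 = 333 (assuming a
	     REG_BR_PROB_BASE of 10000), clamped from below at
	     HITRATE (2) = 200.  */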
	  if (loop_bound_var)
	    predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
				   loop_bound_code,
				   loop_bound_step);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (cond))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			cond == const0_rtx ? NOT_TAKEN : TAKEN);
      break;

    case EQ:
    case UNEQ:
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	;
      /* Comparisons with 0 are often used for booleans and there is
	 nothing useful to predict about them.  */
      else if (XEXP (cond, 1) == const0_rtx
	       || XEXP (cond, 0) == const0_rtx)
	;
      else
	predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
      break;

    case NE:
    case LTGT:
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	;
      /* Comparisons with 0 are often used for booleans and there is
	 nothing useful to predict about them.  */
      else if (XEXP (cond, 1) == const0_rtx
	       || XEXP (cond, 0) == const0_rtx)
	;
      else
	predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
      break;

    case ORDERED:
      predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
      break;

    case UNORDERED:
      predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
      break;

    case LE:
    case LT:
      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	  || XEXP (cond, 1) == constm1_rtx)
	predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
      break;

    case GE:
    case GT:
      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	  || XEXP (cond, 1) == constm1_rtx)
	predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
      break;

    default:
      break;
    }
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
static tree expr_expected_value (tree, bitmap);
/* Helper function for expr_expected_value.  */

static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
		       tree op1, bitmap visited)
{
  gimple def;

  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CONSTANT (op0))
	return op0;

      if (code != SSA_NAME)
	return NULL_TREE;

      def = SSA_NAME_DEF_STMT (op0);

      /* If we were already here, break the infinite cycle.  */
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
	return NULL;

      if (gimple_code (def) == GIMPLE_PHI)
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  int i, n = gimple_phi_num_args (def);
	  tree val = NULL, new_val;

	  for (i = 0; i < n; i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (is_gimple_assign (def))
	{
	  if (gimple_assign_lhs (def) != op0)
	    return NULL;

	  return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
					gimple_assign_rhs1 (def),
					gimple_assign_rhs_code (def),
					gimple_assign_rhs2 (def),
					visited);
	}

      if (is_gimple_call (def))
	{
	  tree decl = gimple_call_fndecl (def);
	  if (!decl)
	    return NULL;
	  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_EXPECT:
		{
		  tree val;
		  if (gimple_call_num_args (def) != 2)
		    return NULL;
		  val = gimple_call_arg (def, 0);
		  if (TREE_CONSTANT (val))
		    return val;
		  return gimple_call_arg (def, 1);
		}

	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
	      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
	      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
		/* Assume that any given atomic operation has low contention,
		   and thus the compare-and-swap operation succeeds.  */
		return boolean_true_node;

	      default:
		break;
	      }
	}

      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (op1, visited);
      if (!op1)
	return NULL;
      res = fold_build2 (code, type, op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
	return NULL;
      res = fold_build1 (code, type, op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
/* Return the constant EXPR is likely to have at execution time, or NULL if
   unknown.  The function is used by the builtin_expect branch predictor, so
   the evidence must come from this construct plus additional possible
   constant folding.

   We may want to implement more involved value guessing (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    return expr;

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
				op0, code, op1, visited);
}
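/* A typical use of the machinery above (a sketch; handle_error is just a
   placeholder name):

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   expr_expected_value_1 folds the condition against the expected value 0
   supplied as the second argument, so the guarded path is predicted not
   taken by PRED_BUILTIN_EXPECT.  */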
/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */
static unsigned int
strip_predict_hints (void)
{
  basic_block bb;
  gimple ass_stmt;
  tree var;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
	{
	  gimple stmt = gsi_stmt (bi);

	  if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      gsi_remove (&bi, true);
	      continue;
	    }
	  else if (gimple_code (stmt) == GIMPLE_CALL)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      if (fndecl
		  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
		  && gimple_call_num_args (stmt) == 2)
		{
		  var = gimple_call_lhs (stmt);
		  if (var)
		    {
		      ass_stmt
			= gimple_build_assign (var, gimple_call_arg (stmt, 0));
		      gsi_replace (&bi, ass_stmt, true);
		    }
		  else
		    {
		      gsi_remove (&bi, true);
		      continue;
		    }
		}
	    }
	  gsi_next (&bi);
	}
    }
  return 0;
}
1776 tree_predict_by_opcode (basic_block bb
)
1778 gimple stmt
= last_stmt (bb
);
1787 if (!stmt
|| gimple_code (stmt
) != GIMPLE_COND
)
1789 FOR_EACH_EDGE (then_edge
, ei
, bb
->succs
)
1790 if (then_edge
->flags
& EDGE_TRUE_VALUE
)
1792 op0
= gimple_cond_lhs (stmt
);
1793 op1
= gimple_cond_rhs (stmt
);
1794 cmp
= gimple_cond_code (stmt
);
1795 type
= TREE_TYPE (op0
);
1796 visited
= BITMAP_ALLOC (NULL
);
1797 val
= expr_expected_value_1 (boolean_type_node
, op0
, cmp
, op1
, visited
);
1798 BITMAP_FREE (visited
);
1801 if (integer_zerop (val
))
1802 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, NOT_TAKEN
);
1804 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, TAKEN
);
1807 /* Try "pointer heuristic."
1808 A comparison ptr == 0 is predicted as false.
1809 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1810 if (POINTER_TYPE_P (type
))
1813 predict_edge_def (then_edge
, PRED_TREE_POINTER
, NOT_TAKEN
);
1814 else if (cmp
== NE_EXPR
)
1815 predict_edge_def (then_edge
, PRED_TREE_POINTER
, TAKEN
);
1819 /* Try "opcode heuristic."
1820 EQ tests are usually false and NE tests are usually true. Also,
1821 most quantities are positive, so we can make the appropriate guesses
1822 about signed comparisons against zero. */
1827 /* Floating point comparisons appears to behave in a very
1828 unpredictable way because of special role of = tests in
1830 if (FLOAT_TYPE_P (type
))
1832 /* Comparisons with 0 are often used for booleans and there is
1833 nothing useful to predict about them. */
1834 else if (integer_zerop (op0
) || integer_zerop (op1
))
1837 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, NOT_TAKEN
);
1842 /* Floating point comparisons appears to behave in a very
1843 unpredictable way because of special role of = tests in
1845 if (FLOAT_TYPE_P (type
))
1847 /* Comparisons with 0 are often used for booleans and there is
1848 nothing useful to predict about them. */
1849 else if (integer_zerop (op0
)
1850 || integer_zerop (op1
))
1853 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, TAKEN
);
1857 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, TAKEN
);
1860 case UNORDERED_EXPR
:
1861 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, NOT_TAKEN
);
1866 if (integer_zerop (op1
)
1867 || integer_onep (op1
)
1868 || integer_all_onesp (op1
)
1871 || real_minus_onep (op1
))
1872 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, NOT_TAKEN
);
1877 if (integer_zerop (op1
)
1878 || integer_onep (op1
)
1879 || integer_all_onesp (op1
)
1882 || real_minus_onep (op1
))
1883 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, TAKEN
);
/* Try to guess whether the return value means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
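/* For instance, a return value of -1 is classified as PRED_NEGATIVE_RETURN
   and predicted not taken, a returned constant such as 42 as
   PRED_CONST_RETURN and predicted taken, while plain 0/1 boolean returns
   yield PRED_NO_PREDICTION.  */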
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (void)
{
  gimple return_stmt = NULL;
  tree return_val;
  edge e;
  gimple phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (return_stmt
	  && gimple_code (return_stmt) == GIMPLE_RETURN)
	break;
    }
  if (!e)
    return;
  return_val = gimple_return_retval (return_stmt);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  phi_num_args = gimple_phi_num_args (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
					 direction);
      }
}
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  bool has_return_edges = false;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
      {
	has_return_edges = true;
	break;
      }

  apply_return_prediction ();

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      if ((gimple_call_flags (stmt) & ECF_NORETURN)
		  && has_return_edges)
		predict_paths_leading_to (bb, PRED_NORETURN,
					  NOT_TAKEN);
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && lookup_attribute ("cold",
				       DECL_ATTRIBUTES (decl)))
		predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
					  NOT_TAKEN);
	    }
	  else if (gimple_code (stmt) == GIMPLE_PREDICT)
	    {
	      predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
					gimple_predict_outcome (stmt));
	      /* Keep GIMPLE_PREDICT around so early inlining will propagate
		 hints to callers.  */
	    }
	}
    }
}
#ifdef ENABLE_CHECKING

/* Callback for pointer_map_traverse, asserts that the pointer map is
   empty.  */

static bool
assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
		 void *data ATTRIBUTE_UNUSED)
{
  gcc_assert (!*value);
  return false;
}
#endif
/* Predict branch probabilities and estimate profile for basic block BB.  */

static void
tree_estimate_probability_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple last;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* Predict early returns to be probable, as we've already taken
	 care of error returns and other cases are often used for
	 fast paths through the function.

	 Since we've already removed the return statements, we are
	 looking for a CFG like:

	   if (conditional)
	     {
	       ..
	       goto return_block
	     }
	   some other blocks
	 return_block:
	   return_stmt.  */
      if (e->dest != bb->next_bb
	  && e->dest != EXIT_BLOCK_PTR
	  && single_succ_p (e->dest)
	  && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
	  && (last = last_stmt (e->dest)) != NULL
	  && gimple_code (last) == GIMPLE_RETURN)
	{
	  edge e1;
	  edge_iterator ei1;

	  if (single_succ_p (bb))
	    {
	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }
	  else
	    if (!predicted_by_p (e->src, PRED_NULL_RETURN)
		&& !predicted_by_p (e->src, PRED_CONST_RETURN)
		&& !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
	      predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	}

      /* Look for block we are guarding (i.e. we dominate it,
	 but it doesn't postdominate us).  */
      if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	  && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	  && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	{
	  gimple_stmt_iterator bi;

	  /* The call heuristic claims that a guarded function call
	     is improbable.  This is because such calls are often used
	     to signal exceptional situations such as printing error
	     messages.  */
	  for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
	       gsi_next (&bi))
	    {
	      gimple stmt = gsi_stmt (bi);
	      if (is_gimple_call (stmt)
		  /* Constant and pure calls are hardly used to signal
		     something exceptional.  */
		  && gimple_has_side_effects (stmt))
		{
		  predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		  break;
		}
	    }
	}
    }
  tree_predict_by_opcode (bb);
}
/* Predict branch probabilities and estimate profile of the tree CFG.
   This function can be called from the loop optimizers to recompute
   the profile information.  */

void
tree_estimate_probability (void)
{
  basic_block bb;

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
  create_preheaders (CP_SIMPLE_PREHEADERS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bb_predictions = pointer_map_create ();
  tree_bb_level_predictions ();
  record_loop_exits ();

  if (number_of_loops () > 1)
    predict_loops ();

  FOR_EACH_BB (bb)
    tree_estimate_probability_bb (bb);

  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

#ifdef ENABLE_CHECKING
  pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
#endif
  pointer_map_destroy (bb_predictions);
  bb_predictions = NULL;

  estimate_bb_frequencies ();
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
}
/* Predict branch probabilities and estimate profile of the tree CFG.
   This is the driver function for PASS_PROFILE.  */

static unsigned int
tree_estimate_probability_driver (void)
{
  unsigned nb_loops;

  loop_optimizer_init (0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops ();

  tree_estimate_probability ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
/* Predict edges to successors of CUR whose sources are not postdominated by
   BB by PRED and recurse to all postdominators.  */

static void
predict_paths_for_bb (basic_block cur, basic_block bb,
		      enum br_predictor pred,
		      enum prediction taken,
		      bitmap visited)
{
  edge e;
  edge_iterator ei;
  basic_block son;

  /* We are looking for all edges forming edge cut induced by
     set of all blocks postdominated by BB.  */
  FOR_EACH_EDGE (e, ei, cur->preds)
    if (e->src->index >= NUM_FIXED_BLOCKS
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
      {
	edge e2;
	edge_iterator ei2;
	bool found = false;

	/* Ignore fake edges and eh, we predict them as not taken anyway.  */
	if (e->flags & (EDGE_EH | EDGE_FAKE))
	  continue;
	gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));

	/* See if there is an edge from e->src that is not abnormal
	   and does not lead to BB.  */
	FOR_EACH_EDGE (e2, ei2, e->src->succs)
	  if (e2 != e
	      && !(e2->flags & (EDGE_EH | EDGE_FAKE))
	      && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
	    {
	      found = true;
	      break;
	    }

	/* If there is a non-abnormal path leaving e->src, predict the edge
	   using the predictor.  Otherwise we need to look for paths
	   leading to e->src.

	   The second may lead to an infinite loop in the case we are
	   predicting regions that are only reachable by abnormal edges.
	   We simply prevent visiting a given BB twice.  */
	if (found)
	  predict_edge_def (e, pred, taken);
	else if (bitmap_set_bit (visited, e->src->index))
	  predict_paths_for_bb (e->src, e->src, pred, taken, visited);
      }
  for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    predict_paths_for_bb (son, bb, pred, taken, visited);
}
/* Sets branch probabilities according to PREDiction and
   FLAGS.  */

static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
			  enum prediction taken)
{
  bitmap visited = BITMAP_ALLOC (NULL);
  predict_paths_for_bb (bb, bb, pred, taken, visited);
  BITMAP_FREE (visited);
}
/* Like predict_paths_leading_to but takes an edge instead of a basic block.  */

static void
predict_paths_leading_to_edge (edge e, enum br_predictor pred,
			       enum prediction taken)
{
  bool has_nonloop_edge = false;
  edge_iterator ei;
  edge e2;

  basic_block bb = e->src;
  FOR_EACH_EDGE (e2, ei, bb->succs)
    if (e2->dest != e->src && e2->dest != e->dest
	&& !(e->flags & (EDGE_EH | EDGE_FAKE))
	&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
      {
	has_nonloop_edge = true;
	break;
      }
  if (!has_nonloop_edge)
    {
      bitmap visited = BITMAP_ALLOC (NULL);
      predict_paths_for_bb (bb, bb, pred, taken, visited);
      BITMAP_FREE (visited);
    }
  else
    predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case edge is a loopback edge, the probability edge will be reached
     in case header is.  Estimated number of iterations of the loop can be
     then computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
      /* When function never returns, we will never process exit block.  */
      if (!count && bb == EXIT_BLOCK_PTR)
	bb->count = bb->frequency = 0;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/*  frequency += (e->probability
				  * BLOCK_INFO (e->src)->frequency /
				  REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}
2433 bitmap_clear_bit (tovisit
, bb
->index
);
2435 e
= find_edge (bb
, head
);
2440 /* EDGE_INFO (e)->back_edge_prob
2441 = ((e->probability * BLOCK_INFO (bb)->frequency)
2442 / REG_BR_PROB_BASE); */
2444 sreal_init (&tmp
, e
->probability
, 0);
2445 sreal_mul (&tmp
, &tmp
, &BLOCK_INFO (bb
)->frequency
);
2446 sreal_mul (&EDGE_INFO (e
)->back_edge_prob
,
2447 &tmp
, &real_inv_br_prob_base
);
2450 /* Propagate to successor blocks. */
2451 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2452 if (!(e
->flags
& EDGE_DFS_BACK
)
2453 && BLOCK_INFO (e
->dest
)->npredecessors
)
2455 BLOCK_INFO (e
->dest
)->npredecessors
--;
2456 if (!BLOCK_INFO (e
->dest
)->npredecessors
)
2461 BLOCK_INFO (last
)->next
= e
->dest
;
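/* Worked example for the formulas above (illustrative): assume a loop whose
   back edge is reached with probability 0.75 per header execution, so
   EDGE_INFO (latch_edge)->back_edge_prob == 0.75 after the inner propagation.
   When the enclosing region is processed, the header collects
   cyclic_probability == 0.75 from that back edge and frequency F from its
   entry edges, and ends up with frequency F / (1 - 0.75) == 4 * F, i.e. an
   estimated four header executions per loop entry.  */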
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}
/* Propagates frequencies through structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops () > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB (bb)
    bitmap_set_bit (tovisit, bb->index);

  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
  BITMAP_FREE (tovisit);
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
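/* Example of the scaling above (illustrative): with BB_FREQ_MAX of 10000, a
   block executed 250 times in a function whose hottest block ran 1000 times
   gets frequency (250 * 10000 + 500) / 1000 == 2500.  */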
/* Return true if the function is likely to be expensive, so there is no point
   in optimizing performance of the prologue and epilogue or doing inlining at
   the expense of code size growth.  THRESHOLD is the limit on the number of
   instructions the function can execute on average to still be considered
   not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
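/* Example (illustrative): with an entry block frequency of 1000 and
   THRESHOLD == 50, limit == 50000.  A block with frequency 2000 containing
   30 active insns contributes 30 * 2000 == 60000 to SUM, which already
   exceeds the limit, so the function is reported as expensive.  */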
/* Estimate basic block frequencies from the given branch probabilities.  */

void
estimate_bb_frequencies (void)
{
  basic_block bb;
  sreal freq_max;

  if (profile_status != PROFILE_READ || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
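/* Note on the final scaling above (illustrative): freq_max holds the largest
   propagated sreal frequency, so after the division every block frequency is
   multiplied by BB_FREQ_MAX / freq_max and rounded via real_one_half.  For
   instance, if the hottest block has propagated frequency 8.0 and BB_FREQ_MAX
   is 10000, a block with propagated frequency 2.0 ends up with
   bb->frequency == 2500.  */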
/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (!profile_info || !flag_branch_probabilities)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
	  != NULL)
	node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
	       != NULL)
	node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
	       || DECL_STATIC_DESTRUCTOR (current_function_decl))
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }
  node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  node->frequency = NODE_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	node->frequency = NODE_FREQUENCY_NORMAL;
    }
}
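/* Example (illustrative): without profile feedback, a function declared with
   __attribute__ ((cold)) is classified as NODE_FREQUENCY_UNLIKELY_EXECUTED
   above, while with feedback a function is marked NODE_FREQUENCY_HOT as soon
   as one of its basic blocks satisfies maybe_hot_bb_p.  */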
static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

/* Build PREDICT_EXPR.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
		   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
struct gimple_opt_pass pass_profile =
{
 {
  GIMPLE_PASS,
  "profile_estimate",			/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability_driver,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_strip_predict_hints =
{
 {
  GIMPLE_PASS,
  "*strip_predict_hints",		/* name */
  NULL,					/* gate */
  strip_predict_hints,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};
/* Rebuild function frequencies.  Passes are in general expected to
   maintain the profile by hand; however, in some cases this is not possible:
   for example, when inlining several functions with loops, frequencies might
   run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);
  if (profile_status == PROFILE_GUESSED)
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies ();
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status == PROFILE_READ)
    counts_to_freqs ();
  else
    gcc_unreachable ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}