/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Rough probability estimates, given symbolic names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
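
/* For orientation (assuming the usual definition REG_BR_PROB_BASE == 10000
   from rtl.h), these work out to roughly PROB_VERY_UNLIKELY == 99 (~1%),
   PROB_EVEN == 5000 (50%), PROB_VERY_LIKELY == 9901 (~99%) and
   PROB_ALWAYS == 10000 (100%).  */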
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use the given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate given in percent to our internal representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
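
/* For example, assuming REG_BR_PROB_BASE == 10000, HITRATE (79)
   == (79 * 10000 + 50) / 100 == 7900, i.e. a predictor documented with a
   79% hitrate maps to 7900 on the internal probability scale.  */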
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
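
/* The rounded division above asks whether the block runs less than half a
   time per profiled run on average; e.g. count == 3 over runs == 10 gives
   (3 + 5) / 10 == 0, so such a block is treated as never executed.  */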
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */

void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */

void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    *prediction = (*prediction)->ep_next;
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */

static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

static void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */

static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
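
/* For example, a block with three normal successors (no EH or fake edges)
   gives each of them probability (REG_BR_PROB_BASE + 1) / 3, i.e. 3333
   assuming REG_BR_PROB_BASE == 10000.  */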
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
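
  /* Illustration of the Dempster-Shafer combination above (values assume
     REG_BR_PROB_BASE == 10000): two independent predictions p1 and p2 are
     merged as p1*p2 / (p1*p2 + (1-p1)*(1-p2)).  For p1 == 8000 (80%) and
     p2 == 7000 (70%), d == 8000*7000 + 2000*3000 == 62000000 and the
     combined probability is 8000*7000*10000 / d + 0.5 ~= 9032, i.e. about
     90%, more confident than either input alone.  */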
  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info[best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  first->probability = combined_probability;
  second->probability = REG_BR_PROB_BASE - combined_probability;
}
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
   analyzing the RTL; otherwise use the tree based approach.  */

static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;
      edge *exits;

      exits = get_loop_exit_edges (loop, &n_exits);

      if (rtlsimpleloops)
	{
	  iv_analysis_loop_init (loop);
	  find_simple_exit (loop, &desc);

	  if (desc.simple_p && desc.const_iter)
	    {
	      int prob;
	      niter = desc.niter + 1;
	      if (niter == 0)	/* We might overflow here.  */
		niter = desc.niter;
	      if (niter
		  > (unsigned int) PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS))
		niter = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);

	      prob = (REG_BR_PROB_BASE
		      - (REG_BR_PROB_BASE + niter / 2) / niter);
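	      /* For instance, a loop with a constant iteration count of
		 about 10 gives niter == 10, so assuming
		 REG_BR_PROB_BASE == 10000 the formula above works out to
		 10000 - (10000 + 5) / 10 == 9000, i.e. roughly
		 1 - 1/niter of the probability scale.  */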
	      /* Branch prediction algorithm gives 0 frequency for everything
		 after the end of the loop for a loop having 0 probability
		 to finish.  */
	      if (prob == REG_BR_PROB_BASE)
		prob = REG_BR_PROB_BASE - 1;
	      predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			    prob);
	    }
	}
      else
	{
	  struct tree_niter_desc niter_desc;

	  for (j = 0; j < n_exits; j++)
	    {
	      tree niter = NULL;

	      if (number_of_iterations_exit (loop, exits[j], &niter_desc, false))
		niter = niter_desc.niter;
	      if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
		niter = loop_niter_by_eval (loop, exits[j]);

	      if (TREE_CODE (niter) == INTEGER_CST)
		{
		  int probability;
		  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
		  if (host_integerp (niter, 1)
		      && tree_int_cst_lt (niter,
					  build_int_cstu (NULL_TREE, max - 1)))
		    {
		      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		      probability = ((REG_BR_PROB_BASE + nitercst / 2)
				     / nitercst);
		    }
		  else
		    probability = ((REG_BR_PROB_BASE + max / 2) / max);

		  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
		}
	    }
	}
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if (e->dest->index < NUM_FIXED_BLOCKS
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge (e, PRED_LOOP_EXIT,
			      (REG_BR_PROB_BASE
			       - predictor_info[(int) PRED_LOOP_EXIT].hitrate)
			      / n_exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    {
      scev_finalize ();
      current_loops = NULL;
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */

static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
      return;
    }

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (cond))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			cond == const0_rtx ? NOT_TAKEN : TAKEN);
      break;

    case EQ:
    case UNEQ:
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	;
      /* Comparisons with 0 are often used for booleans and there is
	 nothing useful to predict about them.  */
      else if (XEXP (cond, 1) == const0_rtx
	       || XEXP (cond, 0) == const0_rtx)
	;
      else
	predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
      break;

    case NE:
    case LTGT:
      /* Floating point comparisons appear to behave in a very
	 unpredictable way because of the special role of = tests in
	 FP code.  */
      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	;
      /* Comparisons with 0 are often used for booleans and there is
	 nothing useful to predict about them.  */
      else if (XEXP (cond, 1) == const0_rtx
	       || XEXP (cond, 0) == const0_rtx)
	;
      else
	predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
      break;

    case ORDERED:
      predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
      break;

    case UNORDERED:
      predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
      break;

    case LE:
    case LT:
      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	  || XEXP (cond, 1) == constm1_rtx)
	predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
      break;

    case GE:
    case GT:
      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	  || XEXP (cond, 1) == constm1_rtx)
	predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
      break;

    default:
      break;
    }
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guessing (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     expected value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);
	  tree val;

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	  if (TREE_CONSTANT (val))
	    return val;
	  return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
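
/* For illustration, a typical source-level use of the construct handled
   above is

     if (__builtin_expect (ptr == NULL, 0))
       handle_error ();

   (handle_error standing for whatever the caller does on the unlikely path).
   expr_expected_value reduces the condition to the expected constant 0, so
   the guarded path ends up predicted as not taken by PRED_BUILTIN_EXPECT
   in tree_predict_by_opcode below.  */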
/* Get rid of all builtin_expect calls we no longer need.  */

static void
strip_builtin_expect (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	}
    }
}
972 tree_predict_by_opcode (basic_block bb
)
974 tree stmt
= last_stmt (bb
);
983 if (!stmt
|| TREE_CODE (stmt
) != COND_EXPR
)
985 FOR_EACH_EDGE (then_edge
, ei
, bb
->succs
)
986 if (then_edge
->flags
& EDGE_TRUE_VALUE
)
988 cond
= TREE_OPERAND (stmt
, 0);
989 if (!COMPARISON_CLASS_P (cond
))
991 op0
= TREE_OPERAND (cond
, 0);
992 type
= TREE_TYPE (op0
);
993 visited
= BITMAP_ALLOC (NULL
);
994 val
= expr_expected_value (cond
, visited
);
995 BITMAP_FREE (visited
);
998 if (integer_zerop (val
))
999 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, NOT_TAKEN
);
1001 predict_edge_def (then_edge
, PRED_BUILTIN_EXPECT
, TAKEN
);
1004 /* Try "pointer heuristic."
1005 A comparison ptr == 0 is predicted as false.
1006 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1007 if (POINTER_TYPE_P (type
))
1009 if (TREE_CODE (cond
) == EQ_EXPR
)
1010 predict_edge_def (then_edge
, PRED_TREE_POINTER
, NOT_TAKEN
);
1011 else if (TREE_CODE (cond
) == NE_EXPR
)
1012 predict_edge_def (then_edge
, PRED_TREE_POINTER
, TAKEN
);
1016 /* Try "opcode heuristic."
1017 EQ tests are usually false and NE tests are usually true. Also,
1018 most quantities are positive, so we can make the appropriate guesses
1019 about signed comparisons against zero. */
1020 switch (TREE_CODE (cond
))
1024 /* Floating point comparisons appears to behave in a very
1025 unpredictable way because of special role of = tests in
1027 if (FLOAT_TYPE_P (type
))
1029 /* Comparisons with 0 are often used for booleans and there is
1030 nothing useful to predict about them. */
1031 else if (integer_zerop (op0
)
1032 || integer_zerop (TREE_OPERAND (cond
, 1)))
1035 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, NOT_TAKEN
);
1040 /* Floating point comparisons appears to behave in a very
1041 unpredictable way because of special role of = tests in
1043 if (FLOAT_TYPE_P (type
))
1045 /* Comparisons with 0 are often used for booleans and there is
1046 nothing useful to predict about them. */
1047 else if (integer_zerop (op0
)
1048 || integer_zerop (TREE_OPERAND (cond
, 1)))
1051 predict_edge_def (then_edge
, PRED_TREE_OPCODE_NONEQUAL
, TAKEN
);
1055 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, TAKEN
);
1058 case UNORDERED_EXPR
:
1059 predict_edge_def (then_edge
, PRED_TREE_FPOPCODE
, NOT_TAKEN
);
1064 if (integer_zerop (TREE_OPERAND (cond
, 1))
1065 || integer_onep (TREE_OPERAND (cond
, 1))
1066 || integer_all_onesp (TREE_OPERAND (cond
, 1))
1067 || real_zerop (TREE_OPERAND (cond
, 1))
1068 || real_onep (TREE_OPERAND (cond
, 1))
1069 || real_minus_onep (TREE_OPERAND (cond
, 1)))
1070 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, NOT_TAKEN
);
1075 if (integer_zerop (TREE_OPERAND (cond
, 1))
1076 || integer_onep (TREE_OPERAND (cond
, 1))
1077 || integer_all_onesp (TREE_OPERAND (cond
, 1))
1078 || real_zerop (TREE_OPERAND (cond
, 1))
1079 || real_onep (TREE_OPERAND (cond
, 1))
1080 || real_minus_onep (TREE_OPERAND (cond
, 1)))
1081 predict_edge_def (then_edge
, PRED_TREE_OPCODE_POSITIVE
, TAKEN
);
/* Try to guess whether the returned value means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with the return expression and look up the possible
   return value, trying to apply the RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}
/* Look for basic blocks that contain events unlikely to happen
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XNEWVEC (int, last_basic_block);
  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	    case MODIFY_EXPR:
	      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = TREE_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && !single_pred_p (bb))
	    {
	      edge e1;
	      edge_iterator ei1;

	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (!JUMP_P (insn) || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70 r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
				     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      gcc_assert (cond == const_true_rtx || cond == const0_rtx);
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && single_succ_p (bb)
	      && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
}
/* Set branch probabilities according to PRED and TAKEN.  HEADS[bb->index]
   should be the index of the basic block in which we need to alter the
   branch predictions (i.e. the first of our dominators such that we do not
   post-dominate it); we fill this information on demand, so ENTRY_BLOCK may
   be there in case it was not needed yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is the first time we need this field in the heads array; so
	 find the first dominator that we do not post-dominate (we are
	 using already known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* If the edge is a loopback edge, the probability that the edge will be
     reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
	     / REG_BR_PROB_BASE); */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
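
/* Illustration of the back edge probability maintained above: if a loop's
   back edge is reached with probability 0.9 per entry into its header, the
   header frequency becomes 1 / (1 - 0.9) == 10, matching the expected
   number of iterations described for edge_info_def.  */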
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for the dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
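
/* For example, assuming BB_FREQ_MAX == 10000, a block executed 250 times in
   a function whose most frequent block ran 1000 times gets frequency
   (250 * 10000 + 500) / 1000 == 2500.  */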
/* Return true if the function is likely to be expensive, so there is no point
   in optimizing the performance of the prologue or epilogue, or in doing
   inlining, at the expense of code size growth.  THRESHOLD is the limit of
   the number of instructions the function can execute on average to still be
   considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
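
/* In other words, the loop above checks whether the estimated number of
   instructions executed per invocation, i.e. the sum of bb->frequency over
   all active insns divided by the entry block frequency, exceeds THRESHOLD;
   e.g. with an entry frequency of 1000 the function stays "not expensive"
   as long as that weighted sum does not exceed 1000 * THRESHOLD.  */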
/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}
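
      /* The scaling above normalizes the propagated sreal frequencies so
	 that the most frequent block ends up with frequency BB_FREQ_MAX;
	 e.g. a block half as frequent as the hottest one gets roughly
	 BB_FREQ_MAX / 2.  */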
      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose the appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}
struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};