/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY  (REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN           (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY    (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS         (REG_BR_PROB_BASE)
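
/* For illustration: with the usual REG_BR_PROB_BASE of 10000, these
   evaluate to PROB_VERY_UNLIKELY == 99 (just under 1%), PROB_EVEN == 5000
   (50%), PROB_VERY_LIKELY == 9901 and PROB_ALWAYS == 10000.  Every
   probability in this file is a fixed-point fraction of
   REG_BR_PROB_BASE.  */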
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
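
/* Worked example: a predictor declared with a 79% hitrate in predict.def
   is stored as HITRATE (79) = (79 * 10000 + 50) / 100 == 7900 when
   REG_BR_PROB_BASE has its usual value of 10000; the "+ 50" term rounds
   to nearest should REG_BR_PROB_BASE ever not be a multiple of 100.  */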
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    *prediction = (*prediction)->ep_next;
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
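
/* For example, a block with three non-EH, non-fake successors gets
   (10000 + 1) / 3 == 3333 on each edge, assuming the usual
   REG_BR_PROB_BASE of 10000; the nedges / 2 term rounds the division
   to nearest.  */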
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing a non-degenerate
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
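
/* A worked example of the Dempster-Shafer combination used above (and
   again in combine_predictions_for_bb below), taking REG_BR_PROB_BASE
   as its usual value of 10000.  Starting from the even prior of 5000,
   a note with probability 8000 gives
     d = 5000 * 8000 + 5000 * 2000 = 50000000
     combined = 5000 * 8000 * 10000 / d = 8000.
   Folding in a second note of 7000 gives
     d = 8000 * 7000 + 2000 * 3000 = 62000000
     combined = 8000 * 7000 * 10000 / d + 0.5, roughly 9032,
   i.e. two independent hints agreeing on "taken" yield a stronger
   prediction than either hint alone.  */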
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges ++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;

      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations
   by analyzing RTL, otherwise use the tree based approach.  */

static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      edge *exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;

      exits = get_loop_exit_edges (loop, &n_exits);

      if (rtlsimpleloops)
	{
	  iv_analysis_loop_init (loop);
	  find_simple_exit (loop, &desc);

	  if (desc.simple_p && desc.const_iter)
	    {
	      int prob;
	      niter = desc.niter + 1;
	      if (niter == 0)        /* We might overflow here.  */
		niter = desc.niter;
	      if (niter
		  > (unsigned int) PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS))
		niter = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);

	      prob = (REG_BR_PROB_BASE
		      - (REG_BR_PROB_BASE + niter / 2) / niter);
	      /* Branch prediction algorithm gives 0 frequency for everything
		 after the end of the loop for a loop having 0 probability
		 to finish.  */
	      if (prob == REG_BR_PROB_BASE)
		prob = REG_BR_PROB_BASE - 1;
	      predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS, prob);
	    }
	}
      else
	{
	  struct tree_niter_desc niter_desc;

	  for (j = 0; j < n_exits; j++)
	    {
	      tree niter = NULL;

	      if (number_of_iterations_exit (loop, exits[j], &niter_desc,
					     false))
		niter = niter_desc.niter;
	      if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
		niter = loop_niter_by_eval (loop, exits[j]);

	      if (TREE_CODE (niter) == INTEGER_CST)
		{
		  int probability;
		  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
		  if (host_integerp (niter, 1)
		      && tree_int_cst_lt (niter,
					  build_int_cstu (NULL_TREE, max - 1)))
		    {
		      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		      probability = ((REG_BR_PROB_BASE + nitercst / 2)
				     / nitercst);
		    }
		  else
		    probability = ((REG_BR_PROB_BASE + max / 2) / max);

		  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
		}
	    }
	}
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if (e->dest->index < NUM_FIXED_BLOCKS
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / n_exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    {
      scev_finalize ();
      current_loops = NULL;
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
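
/* As an illustration of the heuristics above: for code like

     if (p == NULL)
       handle_error ();

   the pointer heuristic predicts the test false, and for

     if (n < 0)
       return -1;

   the opcode heuristic predicts the signed comparison against zero
   false as well, since most quantities tend to be positive.  */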
/* Statically estimate the probability that a branch will be taken and
   produce estimated profile.  When profile feedback is present, the never
   executed portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;
      edge_iterator ei;

      if (! can_predict_insn_p (last_insn))
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (single_succ_p (e->dest)
		   && single_succ (e->dest) == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (CALL_P (insn)
		    /* Constant and pure calls are hardly used to signalize
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return the constant EXPR will likely have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement more involved value guessing (such as value
   range propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);
	  tree val;

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	  if (TREE_CONSTANT (val))
	    return val;
	  return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
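
/* For instance, given

     if (__builtin_expect (x > 10, 0))

   expr_expected_value walks the SSA definition of the condition down to
   the __builtin_expect call, takes the expected value 0 from its second
   argument and folds the comparison, so tree_predict_by_opcode (below)
   can predict the branch not taken.  */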
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    {
	      TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	      update_stmt (stmt);
	    }
	}
    }
}
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Try to guess whether the return value means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
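
/* E.g. a path ending in "return NULL;" is predicted not taken by
   PRED_NULL_RETURN, "return -1;" not taken by PRED_NEGATIVE_RETURN,
   while "return 42;" is predicted taken; plain "return 0;" and
   "return 1;" look like booleans and yield no prediction.  */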
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say something about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XNEWVEC (int, last_basic_block);
  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	      case MODIFY_EXPR:
		if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
		  {
		    stmt = TREE_OPERAND (stmt, 1);
		    goto call_expr;
		  }
		break;
	      case CALL_EXPR:
call_expr:;
		if (call_expr_flags (stmt) & ECF_NORETURN)
		  predict_paths_leading_to (bb, heads, PRED_NORETURN,
					    NOT_TAKEN);
		break;
	      default:
		break;
	    }
	}
    }

  free (heads);
}
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && !single_pred_p (bb))
	    {
	      edge e1;
	      edge_iterator ei1;

	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signalize
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  if (!flag_loop_optimize)
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (!JUMP_P (insn) || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
				     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      gcc_assert (cond == const_true_rtx || cond == const0_rtx);
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of a function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && single_succ_p (bb)
	      && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
}
/* Sets branch probabilities according to PREDiction and
   TAKEN.  HEADS[bb->index] should be the index of the basic block in which
   we need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so ENTRY_BLOCK may be there in case this was not needed
   yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is first time we need this field in heads array; so
	 find first dominator that we do not post-dominate (we are
	 using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case edge is a loopback edge, the probability edge will be reached
     in case header is.  Estimated number of iterations of the loop can be
     then computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
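
/* Following the comment in edge_info_def: if the back edge of a loop is
   estimated to be reached with probability 0.9 whenever the header runs,
   the expected number of iterations is 1 / (1 - 0.9) = 10, and
   propagate_freq below accordingly scales the header frequency by that
   factor.  */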
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we can not
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE);  */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static gcov_type
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
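
/* Example of the scaling above: if the hottest block executed 1000 times
   and BB_FREQ_MAX has its usual value of 10000, a block with count 500
   receives frequency (500 * 10000 + 500) / 1000 == 5000, i.e. frequencies
   are counts rescaled so the hottest block sits at BB_FREQ_MAX.  */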
/* Return true if function is likely to be expensive, so there is no point
   in optimizing performance of the prologue or epilogue, or in doing
   inlining at the expense of code size growth.  THRESHOLD is the limit
   on the number of instructions the function can execute on average to
   still be considered inexpensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};