/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;
/* Random guesstimation given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
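
/* Illustrative values (not part of the original file): REG_BR_PROB_BASE
   is the fixed-point scale used for probabilities, 10000 in rtl.h, so the
   guesstimates above work out roughly as:

     PROB_VERY_UNLIKELY == 10000 / 10 - 1 ==  999   (~10%)
     PROB_EVEN          == 10000 / 2      == 5000   (50%)
     PROB_VERY_LIKELY   == 10000 - 999    == 9001   (~90%)
     PROB_ALWAYS        == 10000          (100%)  */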
static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor,
				      enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate given in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
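
/* Illustrative example (not part of the original file): with
   REG_BR_PROB_BASE == 10000, HITRATE (79) expands to
   ((int) (79 * 10000 + 50) / 100) == 7900, i.e. a 79% hit rate expressed
   on the REG_BR_PROB_BASE scale; the "+ 50" rounds to the nearest unit
   rather than truncating.  */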
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}
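
/* Illustrative example (not part of the original file): with
   profile_info->runs == 10 training runs, a block with count == 4 gives
   (4 + 10 / 2) / 10 == 0, so it executed less than half a time per run on
   average and is treated as never executed; count == 5 already gives
   (5 + 5) / 10 == 1 and the block is kept.  */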
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i = bb_ann (bb)->predictions;
  for (i = bb_ann (bb)->predictions; i; i = i->next)
    if (i->predictor == predictor)
      return true;
  return false;
}
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  if (!any_condjump_p (insn))
    abort ();
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

static void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E with the given PROBABILITY.  */

void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

  i->next = bb_ann (e->src)->predictions;
  bb_ann (e->src)->predictions = i;
  i->probability = probability;
  i->predictor = predictor;
  i->edge = e;
}
/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */

static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}
/* Predict edge E by given predictor if possible.  */

static void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}
/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */

static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
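
/* Illustrative example (not part of the original file): a block with three
   non-EH, non-fake successors gets (10000 + 3 / 2) / 3 == 3333 for each
   edge, assuming REG_BR_PROB_BASE == 10000; the "+ nedges / 2" term rounds
   the division to the nearest unit.  */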
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
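
  /* Worked example (illustrative, not part of the original file): combining
     two independent predictors that both say "taken" with 60% and 75%
     confidence (6000 and 7500 on the REG_BR_PROB_BASE == 10000 scale):

       d        = 6000 * 7500 + (10000 - 6000) * (10000 - 7500)
		= 45000000 + 10000000 = 55000000
       combined = 6000 * 7500 * 10000 / d + 0.5 ~= 8182

     i.e. two agreeing predictors reinforce each other to ~82%, which is the
     Dempster-Shafer evidence-combination rule p1*p2 / (p1*p2 + (1-p1)*(1-p2))
     expressed in fixed point.  */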

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (EDGE_COUNT (bb->succs) > 1)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (EDGE_COUNT (bb->succs) > 1)
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
}
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (FILE *file, basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges ++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb_ann (bb)->predictions = NULL;
      if (file)
	fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (file)
    fprintf (file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb_ann (bb)->predictions; pred; pred = pred->next)
    {
      int predictor = pred->predictor;
      int probability = pred->probability;

      if (pred->edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb_ann (bb)->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
   analyzing RTL, otherwise use a tree based approach.  */

static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (rtlsimpleloops)
	{
	  iv_analysis_loop_init (loop);
	  find_simple_exit (loop, &desc);

	  if (desc.simple_p && desc.const_iter)
	    {
	      int prob;
	      niter = desc.niter + 1;
	      if (niter == 0)	/* We might overflow here.  */
		niter = desc.niter;

	      prob = (REG_BR_PROB_BASE
		      - (REG_BR_PROB_BASE + niter /2) / niter);
	      /* Branch prediction algorithm gives 0 frequency for everything
		 after the end of loop for loop having 0 probability to finish.  */
	      if (prob == REG_BR_PROB_BASE)
		prob = REG_BR_PROB_BASE - 1;
	      predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			    prob);
	    }
	}
      else
	{
	  struct tree_niter_desc niter_desc;
	  edge *exits;
	  unsigned j, n_exits;

	  exits = get_loop_exit_edges (loop, &n_exits);
	  for (j = 0; j < n_exits; j++)
	    {
	      tree niter = NULL;

	      if (number_of_iterations_exit (loop, exits[j], &niter_desc))
		niter = niter_desc.niter;
	      if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
		niter = loop_niter_by_eval (loop, exits[j]);

	      if (TREE_CODE (niter) == INTEGER_CST)
		{
		  int probability;
		  if (host_integerp (niter, 1)
		      && tree_int_cst_lt (niter,
					  build_int_cstu (NULL_TREE,
							  REG_BR_PROB_BASE - 1)))
		    {
		      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		      probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
		    }
		  else
		    probability = 1;

		  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
		}
	    }

	  free (exits);
	}
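
      /* Illustrative example (not part of the original file): for a loop
	 whose exit condition is known to trigger after nitercst == 100
	 iterations, the PRED_LOOP_ITERATIONS probability computed above is
	 (10000 + 100 / 2) / 100 == 100, i.e. roughly a 1% chance of leaving
	 the loop on any given iteration (assuming REG_BR_PROB_BASE == 10000),
	 which matches the expected iteration count of ~100.  */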

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better to be handled
	     separately.  */
	  if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found)
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}
/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */

static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Statically estimate the probability that a branch will be taken and produce
   estimated profile.  When profile feedback is present, never executed
   portions of the function get estimated.  */

void
estimate_probability (struct loops *loops_info)
{
  basic_block bb;

  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  predict_loops (loops_info, true);

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      edge e;
      edge_iterator ei;

      if (! can_predict_insn_p (last_insn))
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and others are often used for fast paths
	     through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (EDGE_COUNT (e->dest->succs) == 1
		   && EDGE_SUCC (e->dest, 0)->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
		   insn = NEXT_INSN (insn))
		if (CALL_P (insn)
		    /* Constant and pure calls are hardly used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}
      bb_estimate_probability_locally (bb);
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_insn (BB_END (bb), bb);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
/* Return constant EXPR will likely have at execution time, NULL if unknown.
   The function is used by the builtin_expect branch predictor, so the
   evidence must come from this construct and additional possible constant
   folding.

   We may want to implement more involved value guessing (such as value range
   propagation based prediction), but such tricks shall go to a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same constant
	     expected value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN (decl) && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);
	  tree val;

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	  if (TREE_CONSTANT (val))
	    return val;
	  return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
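
/* Usage sketch (illustrative, not part of the original file): the evidence
   expr_expected_value looks for typically originates from source code such
   as

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   where the second argument (0) is the value the first argument is expected
   to have, so the guarded branch ends up predicted not taken.  */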
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    {
	      TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	      modify_stmt (stmt);
	    }
	}
    }
}
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_XMALLOC ();
  val = expr_expected_value (cond, visited);
  BITMAP_XFREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of special role of = tests in
	   FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}
/* Try to guess whether the return value means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}
/* Find the basic block with the return expression and look up a possible
   return value, trying to apply the RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  while (phi)
    {
      tree next = PHI_CHAIN (phi);
      if (PHI_RESULT (phi) == return_val)
	break;
      phi = next;
    }
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants)
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}
/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	    case MODIFY_EXPR:
	      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = TREE_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}
/* Predict branch probabilities and estimate profile of the tree CFG.  */
static void
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info, LOOP_TREE);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && EDGE_COUNT (bb->preds) > 1)
	    {
	      edge e1;
	      edge_iterator ei1;

	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (dump_file, bb);

  if (0)  /* FIXME: Enable once we pass the profile down to the RTL level.  */
    strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */

void
expected_value_to_br_prob (void)
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (!JUMP_P (insn) || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
				     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && EDGE_COUNT (bb->succs) == 1
	      && EDGE_SUCC (bb, 0)->dest->next_bb == EXIT_BLOCK_PTR));
}
/* Sets branch probabilities according to PREDiction and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we need
   to alter the branch predictions (i.e. the first of our dominators such
   that we do not post-dominate it); we fill this information on demand, so
   -1 may be there in case it was not needed yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] < 0)
    {
      /* This is first time we need this field in heads array; so
	 find first dominator that we do not post-dominate (we are
	 using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= 0
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge will
     be reached given that the header is.  The estimated number of iterations
     of the loop can then be computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
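
/* Illustrative example (not part of the original file): if the latch edge
   of a loop has back_edge_prob == 0.9 (it is taken 90% of the time the
   header executes), the loop is expected to iterate 1 / (1 - 0.9) == 10
   times, which is how propagate_freq below scales the frequencies of the
   loop body.  */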
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      if (i == (unsigned)ENTRY_BLOCK)
	bb = ENTRY_BLOCK_PTR;
      else if (i == (unsigned)EXIT_BLOCK)
	bb = EXIT_BLOCK_PTR;
      else
	bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge to %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (bitmap_bit_p (tovisit, e->src->index)
		&& !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
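
	      /* Illustrative example (not part of the original file): a
		 block whose incoming back edges carry a combined
		 cyclic_probability of 0.75 has its frequency divided by
		 (1 - 0.75), i.e. multiplied by 4, reflecting an expected
		 four traversals of the cycle.  */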
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
	     / REG_BR_PROB_BASE); */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
/* Estimate probabilities of loopback edges in loops at the same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for the dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

static int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}
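
/* Illustrative example (not part of the original file): if the hottest
   block ran count_max == 1000 times and BB_FREQ_MAX == 10000, a block with
   count == 250 gets frequency (250 * 10000 + 500) / 1000 == 2500, i.e.
   counts are rescaled so that the hottest block sits at BB_FREQ_MAX.  */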
/* Return true if function is likely to be expensive, so there is no point to
   optimize performance of prologue, epilogue or do inlining at the expense
   of code size growth.  THRESHOLD is the limit of the number of instructions
   the function can execute on average to still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that function contains
     internal loop executing more than BB_FREQ_MAX times or profile feedback
     is available and function has not been executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_XMALLOC ();
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_XFREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}
/* Choose appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}
struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  NULL,					/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};