/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
/* Random guesstimation given names.  */
#define PROB_NEVER		(0)
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_UNLIKELY		(REG_BR_PROB_BASE * 4 / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_LIKELY		(REG_BR_PROB_BASE - PROB_UNLIKELY)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
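
/* For illustration only: with REG_BR_PROB_BASE at its usual value of
   10000, these work out as

     PROB_NEVER              0   (  0.0%)
     PROB_VERY_UNLIKELY    999   (~10.0%)
     PROB_UNLIKELY        3999   (~40.0%)
     PROB_EVEN            5000   ( 50.0%)
     PROB_LIKELY          6001   (~60.0%)
     PROB_VERY_LIKELY     9001   (~90.0%)
     PROB_ALWAYS         10000   (100.0%)  */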
static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
static void dump_prediction PARAMS ((enum br_predictor, int,
				     basic_block, int));
static void estimate_loops_at_level PARAMS ((struct loop *loop));
static void propagate_freq PARAMS ((basic_block));
static void estimate_bb_frequencies PARAMS ((struct loops *));
static void counts_to_freqs PARAMS ((void));
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate given in percent to our fixed-point representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
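
/* For example, HITRATE (79) is (79 * REG_BR_PROB_BASE + 50) / 100,
   i.e. 7900 when REG_BR_PROB_BASE is 10000; the + 50 term rounds to the
   nearest unit instead of truncating.  */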
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR
void
predict_insn (insn, predictor, probability)
     rtx insn;
     enum br_predictor predictor;
     int probability;
{
  if (!any_condjump_p (insn))
    abort ();

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}
/* Predict insn by given predictor.  */

void
predict_insn_def (insn, predictor, taken)
     rtx insn;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}
/* Predict edge E with given probability if possible.  */

void
predict_edge (e, predictor, probability)
     edge e;
     enum br_predictor predictor;
     int probability;
{
  rtx last_insn;

  last_insn = e->src->end;

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (e, predictor, taken)
     edge e;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (insn)
     rtx insn;
{
  rtx note = REG_NOTES (insn);

  while (note)
    {
      if (REG_NOTE_KIND (note) == REG_BR_PROB)
	XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
      else if (REG_NOTE_KIND (note) == REG_BR_PRED)
	XEXP (XEXP (note, 0), 1)
	  = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
      note = XEXP (note, 1);
    }
}
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (predictor, probability, bb, used)
     enum br_predictor predictor;
     int probability;
     basic_block bb;
     int used;
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e->flags & EDGE_FALLTHRU)
    e = e->succ_next;

  fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)",
	   probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, "  exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
	       (HOST_WIDEST_INT) bb->count);
      fprintf (rtl_dump_file, " hit ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
	       (HOST_WIDEST_INT) e->count);
      fprintf (rtl_dump_file, " (%.1f%%)",
	       e->count * 100.0 / bb->count);
    }

  fprintf (rtl_dump_file, "\n");
}
/* Combine all REG_BR_PRED notes into a single probability and attach the
   REG_BR_PROB note if not already present.  Remove now useless REG_BR_PRED
   notes.  */

static void
combine_predictions_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note = REG_NOTES (insn);
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  In the future we will use
     better probability combination techniques.  */
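
  /* A sketch of the combination rule used below: treating the predictors
     as independent, probabilities p1 and p2 combine to
     p1 * p2 / (p1 * p2 + (1 - p1) * (1 - p2)).  For example, two
     predictors each claiming 8000 of REG_BR_PROB_BASE's usual 10000 (80%)
     reinforce each other and combine to about 9412 (94.1%).  */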
  while (note)
    {
      if (REG_NOTE_KIND (note) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	  found = true;
	  if (best_predictor > predictor)
	    best_probability = probability, best_predictor = predictor;

	  d = (combined_probability * probability
	       + (REG_BR_PROB_BASE - combined_probability)
	       * (REG_BR_PROB_BASE - probability));

	  /* Use FP math to avoid overflows of 32-bit integers.  */
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
	}
      note = XEXP (note, 1);
    }

  /* Decide which heuristic to use.  If we didn't match anything, use the
     no_prediction heuristic; if we did, use either first match or
     Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;

  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
	 non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}
/* Statically estimate the probability that a branch will be taken.

   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (loops_info)
     struct loops *loops_info;
{
  sbitmap *dominators, *post_dominators;
  int i;
  int found_noreturn = 0;

  dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
  calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 0; i < loops_info->num; i++)
    {
      int j;

      for (j = loops_info->array[i].first->index;
	   j <= loops_info->array[i].last->index;
	   ++j)
	if (TEST_BIT (loops_info->array[i].nodes, j))
	  {
	    int header_found = 0;
	    edge e;

	    /* Loop branch heuristics - predict as taken an edge back to
	       a loop's head.  */
	    for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
	      if (e->dest == loops_info->array[i].header
		  && e->src == loops_info->array[i].latch)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    /* Loop exit heuristics - predict as not taken an edge
	       exiting the loop if the conditional has no loop header
	       successors.  */
	    if (!header_found)
	      for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
		if (e->dest->index < 0
		    || !TEST_BIT (loops_info->array[i].nodes, e->dest->index))
		  predict_edge_def (e, PRED_LOOP_EXIT, NOT_TAKEN);
	  }
    }

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx last_insn = bb->end;
      rtx cond, earliest;
      edge e;

      /* If block has no successor, predict all possible paths to it as
	 improbable, as the block contains a call to a noreturn function
	 and thus can be executed only once.  */
      if (bb->succ == NULL && !found_noreturn)
	{
	  int y;

	  /* ??? Postdominator claims each noreturn block to be
	     postdominated by each other, so we need to run only once.
	     This needs to be changed once the postdominance algorithm is
	     updated to say something more sane.  */
	  found_noreturn = 1;
	  for (y = 0; y < n_basic_blocks; y++)
	    if (!TEST_BIT (post_dominators[y], i))
	      for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
		if (e->dest->index >= 0
		    && TEST_BIT (post_dominators[e->dest->index], i))
		  predict_edge_def (e, PRED_NORETURN, NOT_TAKEN);
	}

      if (GET_CODE (last_insn) != JUMP_INSN
	  || ! any_condjump_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict edges to blocks that return immediately to be
	     improbable.  These are usually used to signal error states.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      || (e->dest->succ && !e->dest->succ->succ_next
		  && e->dest->succ->dest == EXIT_BLOCK_PTR))
	    predict_edge_def (e, PRED_ERROR_RETURN, NOT_TAKEN);

	  /* Look for block we are guarding (ie we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR
	      && e->dest != bb
	      && TEST_BIT (dominators[e->dest->index], e->src->index)
	      && !TEST_BIT (post_dominators[e->src->index], e->dest->index))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == CALL_INSN
		    /* Constant and pure calls are hardly used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}

      cond = get_condition (last_insn, &earliest);
      if (! cond)
	continue;

      /* Try "pointer heuristic."
	 A comparison ptr == 0 is predicted as false.
	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      switch (GET_CODE (cond))
	{
	case EQ:
	  if (GET_CODE (XEXP (cond, 0)) == REG
	      && REG_POINTER (XEXP (cond, 0))
	      && (XEXP (cond, 1) == const0_rtx
		  || (GET_CODE (XEXP (cond, 1)) == REG
		      && REG_POINTER (XEXP (cond, 1)))))
	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
	  break;
	case NE:
	  if (GET_CODE (XEXP (cond, 0)) == REG
	      && REG_POINTER (XEXP (cond, 0))
	      && (XEXP (cond, 1) == const0_rtx
		  || (GET_CODE (XEXP (cond, 1)) == REG
		      && REG_POINTER (XEXP (cond, 1)))))
	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
	  break;

	default:
	  break;
	}

      /* Try "opcode heuristic."
	 EQ tests are usually false and NE tests are usually true.  Also,
	 most quantities are positive, so we can make the appropriate guesses
	 about signed comparisons against zero.  */
      switch (GET_CODE (cond))
	{
	case CONST_INT:
	  /* Unconditional branch.  */
	  predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			    cond == const0_rtx ? NOT_TAKEN : TAKEN);
	  break;

	case EQ:
	case UNEQ:
	  predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
	  break;
	case NE:
	case LTGT:
	  predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
	  break;
	case ORDERED:
	  predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
	  break;
	case UNORDERED:
	  predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
	  break;
	case LE:
	case LT:
	  if (XEXP (cond, 1) == const0_rtx
	      || (GET_CODE (XEXP (cond, 1)) == CONST_INT
		  && INTVAL (XEXP (cond, 1)) == -1))
	    predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
	  break;
	case GE:
	case GT:
	  if (XEXP (cond, 1) == const0_rtx
	      || (GET_CODE (XEXP (cond, 1)) == CONST_INT
		  && INTVAL (XEXP (cond, 1)) == -1))
	    predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
	  break;

	default:
	  break;
	}
    }

  /* Attach the combined probability to each conditional jump.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      rtx last_insn = BLOCK_END (i);

      if (GET_CODE (last_insn) != JUMP_INSN
	  || ! any_condjump_p (last_insn))
	continue;
      combine_predictions_for_insn (last_insn, BASIC_BLOCK (i));
    }

  sbitmap_vector_free (post_dominators);
  sbitmap_vector_free (dominators);

  estimate_bb_frequencies (loops_info);
}
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */
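
/* An illustrative sketch (the exact RTL depends on the target): for

     if (__builtin_expect (ptr != NULL, 1))
       ...

   expansion leaves a NOTE_INSN_EXPECTED_VALUE note recording which value
   the register holding the tested expression is expected to have; the
   loop below matches that note against the conditional jump and attaches
   a PRED_BUILTIN_EXPECT prediction to it.  */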
void
expected_value_to_br_prob ()
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX)
	    continue;
	  if (! any_condjump_p (insn))
	    continue;
	  break;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg);
      if (! cond
	  || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're building
	 involves two constants, we should wind up with either true or
	 false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  volatile double frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the loop header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).

     Volatile is needed to avoid differences in the optimized and
     unoptimized builds on machines where FP registers are wider than
     double.  */
  volatile double back_edge_prob;

  /* True if the edge is a loopback edge in the natural loop.  */
  int back_edge:1;
} *edge_info;
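
/* For example, a back edge whose back_edge_prob works out to 0.9 implies
   the loop is expected to iterate about 1 / (1 - 0.9) = 10 times.  */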

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for loops headed by HEAD.  */

static void
propagate_freq (head)
     basic_block head;
{
  basic_block bb = head;
  basic_block last = bb;
  edge e;
  basic_block nextbb;
  int n;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  for (n = 0; n < n_basic_blocks; n++)
    {
      basic_block bb = BASIC_BLOCK (n);

      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

  BLOCK_INFO (head)->frequency = 1;
  for (; bb; bb = nextbb)
    {
      volatile double cyclic_probability = 0, frequency = 0;

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      cyclic_probability += EDGE_INFO (e)->back_edge_prob;
	    else if (!(e->flags & EDGE_DFS_BACK))
	      frequency += (e->probability
			    * BLOCK_INFO (e->src)->frequency
			    / REG_BR_PROB_BASE);

	  if (cyclic_probability > 1.0 - 1.0 / REG_BR_PROB_BASE)
	    cyclic_probability = 1.0 - 1.0 / REG_BR_PROB_BASE;

	  BLOCK_INFO (bb)->frequency = frequency / (1 - cyclic_probability);
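
	  /* For instance, a lone back edge with back_edge_prob 0.75 scales
	     the incoming frequency by 1 / (1 - 0.75), i.e. the body is
	     expected to run about 4 times per entry.  The cap above keeps
	     the implied iteration count at most about REG_BR_PROB_BASE.  */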
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  EDGE_INFO (e)->back_edge_prob = (e->probability
					   * BLOCK_INFO (bb)->frequency
					   / REG_BR_PROB_BASE);

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}
/* Estimate probabilities of loopback edges in loops at same nest level.  */

static void
estimate_loops_at_level (first_loop)
     struct loop *first_loop;
{
  struct loop *l, *loop = first_loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      int n;
      edge e;

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      for (e = loop->latch->succ; e->dest != loop->header; e = e->succ_next)
	;
      EDGE_INFO (e)->back_edge = 1;

      /* In case the loop header is shared, ensure that it is the last
	 one sharing the same header, so we avoid redundant work.  */
      if (loop->shared)
	{
	  for (l = loop->next; l; l = l->next)
	    if (l->header == loop->header)
	      break;

	  if (l)
	    continue;
	}

      /* Now mark all nodes of all loops with the given header as not yet
	 visited.  */
      for (l = loop->shared ? first_loop : loop; l != loop->next; l = l->next)
	if (loop->header == l->header)
	  EXECUTE_IF_SET_IN_SBITMAP (l->nodes, 0, n,
				     BLOCK_INFO (BASIC_BLOCK (n))->tovisit = 1
				     );

      propagate_freq (loop->header);
    }
}
/* Convert counts measured by profile-driven feedback to frequencies.  */

static void
counts_to_freqs ()
{
  HOST_WIDEST_INT count_max = 1;
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->count > count_max)
      count_max = BASIC_BLOCK (i)->count;

  for (i = -2; i < n_basic_blocks; i++)
    {
      basic_block bb;

      if (i == -2)
	bb = ENTRY_BLOCK_PTR;
      else if (i == -1)
	bb = EXIT_BLOCK_PTR;
      else
	bb = BASIC_BLOCK (i);

      bb->frequency = ((bb->count * BB_FREQ_MAX + count_max / 2)
		       / count_max);
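
      /* For example, with count_max 1000 and BB_FREQ_MAX at its usual
	 value of 10000, a block executed 250 times gets frequency
	 (250 * 10000 + 500) / 1000 = 2500.  */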
    }
}

/* Return true if the function is likely to be expensive, so there is no
   point in optimizing performance of the prologue, epilogue or doing
   inlining at the expense of code size growth.  THRESHOLD is the limit of
   the number of instructions the function can execute on average to still
   be considered not expensive.  */

bool
expensive_function_p (threshold)
     int threshold;
{
  unsigned int sum = 0;
  int i;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
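
  /* For example, if the entry block has frequency 1000 and THRESHOLD is
     50, LIMIT is 50000; since the loop below adds the block frequency
     once per active insn, exceeding LIMIT means the function is expected
     to execute more than about 50 insns per invocation.  */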
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;

      for (insn = bb->head; insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}
/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (loops)
     struct loops *loops;
{
  int i;
  double freq_max = 0;

  mark_dfs_back_edges ();
  if (flag_branch_probabilities)
    {
      counts_to_freqs ();
      return;
    }

  /* Fill in the probability values in flowgraph based on the REG_BR_PROB
     notes.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      rtx last_insn = BLOCK_END (i);
      int probability;
      edge fallthru, branch;

      if (GET_CODE (last_insn) != JUMP_INSN || !any_condjump_p (last_insn)
	  /* Avoid handling of conditional jumps jumping to fallthru edge.  */
	  || BASIC_BLOCK (i)->succ->succ_next == NULL)
	{
	  /* We can predict only conditional jumps at the moment.
	     Expect each edge to be equally probable.
	     ??? In the future we want to make abnormal edges improbable.  */
	  int nedges = 0;
	  edge e;

	  for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	    {
	      nedges++;
	      if (e->probability != 0)
		break;
	    }
	  if (!e)
	    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
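
	  /* For example, a block with three successors and no probabilities
	     set gives each edge (10000 + 1) / 3 = 3333 when REG_BR_PROB_BASE
	     is 10000; the nedges / 2 term merely rounds the division.  */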
	}
      else
	{
	  probability = INTVAL (XEXP (find_reg_note (last_insn,
						     REG_BR_PROB, 0), 0));
	  fallthru = BASIC_BLOCK (i)->succ;
	  if (!(fallthru->flags & EDGE_FALLTHRU))
	    fallthru = fallthru->succ_next;
	  branch = BASIC_BLOCK (i)->succ;
	  if (branch->flags & EDGE_FALLTHRU)
	    branch = branch->succ_next;

	  branch->probability = probability;
	  fallthru->probability = REG_BR_PROB_BASE - probability;
	}
    }

  ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (struct block_info_def));
  alloc_aux_for_edges (sizeof (struct edge_info_def));
  for (i = -2; i < n_basic_blocks; i++)
    {
      edge e;
      basic_block bb;

      if (i == -2)
	bb = ENTRY_BLOCK_PTR;
      else if (i == -1)
	bb = EXIT_BLOCK_PTR;
      else
	bb = BASIC_BLOCK (i);

      BLOCK_INFO (bb)->tovisit = 0;
      for (e = bb->succ; e; e = e->succ_next)
	EDGE_INFO (e)->back_edge_prob = ((double) e->probability
					 / REG_BR_PROB_BASE);
    }

  /* First compute probabilities locally for each loop from innermost
     to outermost to examine probabilities for back edges.  */
  estimate_loops_at_level (loops->tree_root);

  /* Now fake a loop around the whole function to finalize probabilities.  */
  for (i = 0; i < n_basic_blocks; i++)
    BLOCK_INFO (BASIC_BLOCK (i))->tovisit = 1;

  BLOCK_INFO (ENTRY_BLOCK_PTR)->tovisit = 1;
  BLOCK_INFO (EXIT_BLOCK_PTR)->tovisit = 1;
  propagate_freq (ENTRY_BLOCK_PTR);

  for (i = 0; i < n_basic_blocks; i++)
    if (BLOCK_INFO (BASIC_BLOCK (i))->frequency > freq_max)
      freq_max = BLOCK_INFO (BASIC_BLOCK (i))->frequency;

  for (i = -2; i < n_basic_blocks; i++)
    {
      basic_block bb;

      if (i == -2)
	bb = ENTRY_BLOCK_PTR;
      else if (i == -1)
	bb = EXIT_BLOCK_PTR;
      else
	bb = BASIC_BLOCK (i);

      bb->frequency = (BLOCK_INFO (bb)->frequency * BB_FREQ_MAX / freq_max
		       + 0.5);
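
      /* For example, if the hottest block ends up with propagated frequency
	 4.0 and this block with 1.0, this block gets frequency
	 1.0 * BB_FREQ_MAX / 4.0 + 0.5, i.e. 2500 when BB_FREQ_MAX is
	 10000.  */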
    }

  free_aux_for_blocks ();
  free_aux_for_edges ();
}