/* Extraction artifact: this file is gcc/predict.c from official-gcc.git
   (blob 38b9a5b69285cd3b3bda9a22df7b880ea20e2e5f).  */
1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /* References:
23 [1] "Branch Prediction for Free"
24 Ball and Larus; PLDI '93.
25 [2] "Static Branch Frequency and Program Profile Analysis"
26 Wu and Larus; MICRO-27.
27 [3] "Corpus-based Static Branch Prediction"
28 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.
33 #include "config.h"
34 #include "system.h"
35 #include "tree.h"
36 #include "rtl.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "insn-config.h"
41 #include "regs.h"
42 #include "flags.h"
43 #include "output.h"
44 #include "function.h"
45 #include "except.h"
46 #include "toplev.h"
47 #include "recog.h"
48 #include "expr.h"
49 #include "predict.h"
/* Random guesstimation given names.  Probabilities are expressed on the
   scale [0, REG_BR_PROB_BASE].  */
#define PROB_NEVER          (0)
#define PROB_VERY_UNLIKELY  (REG_BR_PROB_BASE / 10 - 1)
#define PROB_UNLIKELY       (REG_BR_PROB_BASE * 4 / 10 - 1)
#define PROB_EVEN           (REG_BR_PROB_BASE / 2)
#define PROB_LIKELY         (REG_BR_PROB_BASE - PROB_UNLIKELY)
#define PROB_VERY_LIKELY    (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS         (REG_BR_PROB_BASE)
60 static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
61 static void dump_prediction PARAMS ((enum br_predictor, int,
62 basic_block));
63 static void estimate_loops_at_level PARAMS ((struct loop *loop));
64 static void propagate_freq PARAMS ((basic_block));
65 static void estimate_bb_frequencies PARAMS ((struct loops *));
66 static void counts_to_freqs PARAMS ((void));
68 /* Information we hold about each branch predictor.
69 Filled using information from predict.def. */
70 struct predictor_info
72 const char *name; /* Name used in the debugging dumps. */
73 int hitrate; /* Expected hitrate used by
74 predict_insn_def call. */
75 int flags;
78 /* Use given predictor without Dempster-Shaffer theory if it matches
79 using first_match heuristics. */
80 #define PRED_FLAG_FIRST_MATCH 1
82 /* Recompute hitrate in percent to our representation. */
84 #define HITRATE(VAL) ((int)((VAL) * REG_BR_PROB_BASE + 50) / 100)
86 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
87 struct predictor_info predictor_info[] = {
88 #include "predict.def"
90 /* Upper bound on predictors. */
91 {NULL, 0, 0}
93 #undef DEF_PREDICTOR
95 void
96 predict_insn (insn, predictor, probability)
97 rtx insn;
98 int probability;
99 enum br_predictor predictor;
101 if (!any_condjump_p (insn))
102 abort ();
103 REG_NOTES (insn)
104 = gen_rtx_EXPR_LIST (REG_BR_PRED,
105 gen_rtx_CONCAT (VOIDmode,
106 GEN_INT ((int) predictor),
107 GEN_INT ((int) probability)),
108 REG_NOTES (insn));
111 /* Predict insn by given predictor. */
112 void
113 predict_insn_def (insn, predictor, taken)
114 rtx insn;
115 enum br_predictor predictor;
116 enum prediction taken;
118 int probability = predictor_info[(int) predictor].hitrate;
119 if (taken != TAKEN)
120 probability = REG_BR_PROB_BASE - probability;
121 predict_insn (insn, predictor, probability);
124 /* Predict edge E with given probability if possible. */
125 void
126 predict_edge (e, predictor, probability)
127 edge e;
128 int probability;
129 enum br_predictor predictor;
131 rtx last_insn;
132 last_insn = e->src->end;
134 /* We can store the branch prediction information only about
135 conditional jumps. */
136 if (!any_condjump_p (last_insn))
137 return;
139 /* We always store probability of branching. */
140 if (e->flags & EDGE_FALLTHRU)
141 probability = REG_BR_PROB_BASE - probability;
143 predict_insn (last_insn, predictor, probability);
146 /* Predict edge E by given predictor if possible. */
147 void
148 predict_edge_def (e, predictor, taken)
149 edge e;
150 enum br_predictor predictor;
151 enum prediction taken;
153 int probability = predictor_info[(int) predictor].hitrate;
155 if (taken != TAKEN)
156 probability = REG_BR_PROB_BASE - probability;
157 predict_edge (e, predictor, probability);
160 /* Invert all branch predictions or probability notes in the INSN. This needs
161 to be done each time we invert the condition used by the jump. */
162 void
163 invert_br_probabilities (insn)
164 rtx insn;
166 rtx note = REG_NOTES (insn);
168 while (note)
170 if (REG_NOTE_KIND (note) == REG_BR_PROB)
171 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
172 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
173 XEXP (XEXP (note, 0), 1)
174 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
175 note = XEXP (note, 1);
179 /* Dump information about the branch prediction to the output file. */
180 static void
181 dump_prediction (predictor, probability, bb)
182 enum br_predictor predictor;
183 int probability;
184 basic_block bb;
186 edge e = bb->succ;
188 if (!rtl_dump_file)
189 return;
191 while (e->flags & EDGE_FALLTHRU)
192 e = e->succ_next;
194 fprintf (rtl_dump_file, " %s heuristics: %.1f%%",
195 predictor_info[predictor].name,
196 probability * 100.0 / REG_BR_PROB_BASE);
198 if (bb->count)
200 fprintf (rtl_dump_file, " exec ");
201 fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
202 (HOST_WIDEST_INT) bb->count);
203 fprintf (rtl_dump_file, " hit ");
204 fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
205 (HOST_WIDEST_INT) e->count);
206 fprintf (rtl_dump_file, " (%.1f%%)",
207 e->count * 100.0 / bb->count);
209 fprintf (rtl_dump_file, "\n");
212 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
213 note if not already present. Remove now useless REG_BR_PRED notes. */
214 static void
215 combine_predictions_for_insn (insn, bb)
216 rtx insn;
217 basic_block bb;
219 rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
220 rtx *pnote = &REG_NOTES (insn);
221 int best_probability = PROB_EVEN;
222 int best_predictor = END_PREDICTORS;
223 int combined_probability = REG_BR_PROB_BASE / 2;
224 int d;
226 if (rtl_dump_file)
227 fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
228 bb->index);
230 /* We implement "first match" heuristics and use probability guessed
231 by predictor with smallest index. In future we will use better
232 probability combination techniques. */
233 while (*pnote)
235 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
237 int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
238 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
240 dump_prediction (predictor, probability, bb);
241 if (best_predictor > predictor)
242 best_probability = probability, best_predictor = predictor;
243 *pnote = XEXP (*pnote, 1);
245 d = (combined_probability * probability
246 + (REG_BR_PROB_BASE - combined_probability)
247 * (REG_BR_PROB_BASE - probability));
248 /* An FP math to avoid overflows of 32bit integers. */
249 combined_probability = (((double)combined_probability) * probability
250 * REG_BR_PROB_BASE / d + 0.5);
252 else
253 pnote = &XEXP (*pnote, 1);
255 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
256 combined_probability = best_probability;
257 dump_prediction (PRED_FIRST_MATCH, best_probability, bb);
258 dump_prediction (PRED_COMBINED, combined_probability, bb);
259 if (!prob_note)
261 REG_NOTES (insn)
262 = gen_rtx_EXPR_LIST (REG_BR_PROB,
263 GEN_INT (combined_probability), REG_NOTES (insn));
264 /* Save the prediction into CFG in case we are seeing non-degenerated
265 conditional jump. */
266 if (bb->succ->succ_next)
268 BRANCH_EDGE (bb)->probability = combined_probability;
269 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - combined_probability;
274 /* Statically estimate the probability that a branch will be taken.
275 ??? In the next revision there will be a number of other predictors added
276 from the above references. Further, each heuristic will be factored out
277 into its own function for clarity (and to facilitate the combination of
278 predictions). */
280 void
281 estimate_probability (loops_info)
282 struct loops *loops_info;
284 sbitmap *dominators, *post_dominators;
285 int i;
286 int found_noreturn = 0;
288 dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
289 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
290 calculate_dominance_info (NULL, dominators, 0);
291 calculate_dominance_info (NULL, post_dominators, 1);
293 /* Try to predict out blocks in a loop that are not part of a
294 natural loop. */
295 for (i = 0; i < loops_info->num; i++)
297 int j;
299 for (j = loops_info->array[i].first->index;
300 j <= loops_info->array[i].last->index;
301 ++j)
303 if (TEST_BIT (loops_info->array[i].nodes, j))
305 int header_found = 0;
306 edge e;
308 /* Loop branch heruistics - predict as taken an edge back to
309 a loop's head. */
310 for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
311 if (e->dest == loops_info->array[i].header
312 && e->src == loops_info->array[i].latch)
314 header_found = 1;
315 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
317 /* Loop exit heruistics - predict as not taken an edge exiting
318 the loop if the conditinal has no loop header successors */
319 if (!header_found)
320 for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
321 if (e->dest->index <= 0
322 || !TEST_BIT (loops_info->array[i].nodes, e->dest->index))
323 predict_edge_def (e, PRED_LOOP_EXIT, NOT_TAKEN);
328 /* Attempt to predict conditional jumps using a number of heuristics. */
329 for (i = 0; i < n_basic_blocks; i++)
331 basic_block bb = BASIC_BLOCK (i);
332 rtx last_insn = bb->end;
333 rtx cond, earliest;
334 edge e;
336 /* If block has no sucessor, predict all possible paths to
337 it as improbable, as the block contains a call to a noreturn
338 function and thus can be executed only once. */
339 if (bb->succ == NULL && !found_noreturn)
341 int y;
343 /* ??? Postdominator claims each noreturn block to be postdominated
344 by each, so we need to run only once. This needs to be changed
345 once postdominace algorithm is updated to say something more sane.
347 found_noreturn = 1;
348 for (y = 0; y < n_basic_blocks; y++)
349 if (!TEST_BIT (post_dominators[y], i))
351 for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
352 if (e->dest->index >= 0
353 && TEST_BIT (post_dominators[e->dest->index], i))
354 predict_edge_def (e, PRED_NORETURN, NOT_TAKEN);
358 if (GET_CODE (last_insn) != JUMP_INSN
359 || ! any_condjump_p (last_insn))
360 continue;
362 for (e = bb->succ; e; e = e->succ_next)
364 /* Predict edges to blocks that return immediately to be
365 improbable. These are usually used to signal error states. */
366 if (e->dest == EXIT_BLOCK_PTR
367 || (e->dest->succ && !e->dest->succ->succ_next
368 && e->dest->succ->dest == EXIT_BLOCK_PTR))
369 predict_edge_def (e, PRED_ERROR_RETURN, NOT_TAKEN);
371 /* Look for block we are guarding (ie we dominate it,
372 but it doesn't postdominate us). */
373 if (e->dest != EXIT_BLOCK_PTR
374 && e->dest != bb
375 && TEST_BIT (dominators[e->dest->index], e->src->index)
376 && !TEST_BIT (post_dominators[e->src->index], e->dest->index))
378 rtx insn;
379 /* The call heuristic claims that a guarded function call
380 is improbable. This is because such calls are often used
381 to signal exceptional situations such as printing error
382 messages. */
383 for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
384 insn = NEXT_INSN (insn))
385 if (GET_CODE (insn) == CALL_INSN
386 /* Constant and pure calls are hardly used to signalize
387 something exceptional. */
388 && ! CONST_CALL_P (insn))
390 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
391 break;
396 cond = get_condition (last_insn, &earliest);
397 if (! cond)
398 continue;
400 /* Try "pointer heuristic."
401 A comparison ptr == 0 is predicted as false.
402 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
403 switch (GET_CODE (cond))
405 case EQ:
406 if (GET_CODE (XEXP (cond, 0)) == REG
407 && REG_POINTER (XEXP (cond, 0))
408 && (XEXP (cond, 1) == const0_rtx
409 || (GET_CODE (XEXP (cond, 1)) == REG
410 && REG_POINTER (XEXP (cond, 1)))))
412 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
413 break;
414 case NE:
415 if (GET_CODE (XEXP (cond, 0)) == REG
416 && REG_POINTER (XEXP (cond, 0))
417 && (XEXP (cond, 1) == const0_rtx
418 || (GET_CODE (XEXP (cond, 1)) == REG
419 && REG_POINTER (XEXP (cond, 1)))))
420 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
421 break;
423 default:
424 break;
427 /* Try "opcode heuristic."
428 EQ tests are usually false and NE tests are usually true. Also,
429 most quantities are positive, so we can make the appropriate guesses
430 about signed comparisons against zero. */
431 switch (GET_CODE (cond))
433 case CONST_INT:
434 /* Unconditional branch. */
435 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
436 cond == const0_rtx ? NOT_TAKEN : TAKEN);
437 break;
439 case EQ:
440 case UNEQ:
441 predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
442 break;
443 case NE:
444 case LTGT:
445 predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
446 break;
447 case ORDERED:
448 predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
449 break;
450 case UNORDERED:
451 predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
452 break;
453 case LE:
454 case LT:
455 if (XEXP (cond, 1) == const0_rtx
456 || (GET_CODE (XEXP (cond, 1)) == CONST_INT
457 && INTVAL (XEXP (cond, 1)) == -1))
458 predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
459 break;
460 case GE:
461 case GT:
462 if (XEXP (cond, 1) == const0_rtx
463 || (GET_CODE (XEXP (cond, 1)) == CONST_INT
464 && INTVAL (XEXP (cond, 1)) == -1))
465 predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
466 break;
468 default:
469 break;
473 /* Attach the combined probability to each conditional jump. */
474 for (i = 0; i < n_basic_blocks; i++)
476 rtx last_insn = BLOCK_END (i);
478 if (GET_CODE (last_insn) != JUMP_INSN
479 || ! any_condjump_p (last_insn))
480 continue;
481 combine_predictions_for_insn (last_insn, BASIC_BLOCK (i));
483 sbitmap_vector_free (post_dominators);
484 sbitmap_vector_free (dominators);
486 estimate_bb_frequencies (loops_info);
489 /* __builtin_expect dropped tokens into the insn stream describing
490 expected values of registers. Generate branch probabilities
491 based off these values. */
493 void
494 expected_value_to_br_prob ()
496 rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;
498 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
500 switch (GET_CODE (insn))
502 case NOTE:
503 /* Look for expected value notes. */
504 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
506 ev = NOTE_EXPECTED_VALUE (insn);
507 ev_reg = XEXP (ev, 0);
509 continue;
511 case CODE_LABEL:
512 /* Never propagate across labels. */
513 ev = NULL_RTX;
514 continue;
516 default:
517 /* Look for insns that clobber the EV register. */
518 if (ev && reg_set_p (ev_reg, insn))
519 ev = NULL_RTX;
520 continue;
522 case JUMP_INSN:
523 /* Look for simple conditional branches. If we havn't got an
524 expected value yet, no point going further. */
525 if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX)
526 continue;
527 if (! any_condjump_p (insn))
528 continue;
529 break;
532 /* Collect the branch condition, hopefully relative to EV_REG. */
533 /* ??? At present we'll miss things like
534 (expected_value (eq r70 0))
535 (set r71 -1)
536 (set r80 (lt r70 r71))
537 (set pc (if_then_else (ne r80 0) ...))
538 as canonicalize_condition will render this to us as
539 (lt r70, r71)
540 Could use cselib to try and reduce this further. */
541 cond = XEXP (SET_SRC (PATTERN (insn)), 0);
542 cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg);
543 if (! cond
544 || XEXP (cond, 0) != ev_reg
545 || GET_CODE (XEXP (cond, 1)) != CONST_INT)
546 continue;
548 /* Substitute and simplify. Given that the expression we're
549 building involves two constants, we should wind up with either
550 true or false. */
551 cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
552 XEXP (ev, 1), XEXP (cond, 1));
553 cond = simplify_rtx (cond);
555 /* Turn the condition into a scaled branch probability. */
556 if (cond != const_true_rtx && cond != const0_rtx)
557 abort ();
558 predict_insn_def (insn, PRED_BUILTIN_EXPECT,
559 cond == const_true_rtx ? TAKEN : NOT_TAKEN);
563 /* This is used to carry information about basic blocks. It is
564 attached to the AUX field of the standard CFG block. */
566 typedef struct block_info_def
568 /* Estimated frequency of execution of basic_block. */
569 double frequency;
571 /* To keep queue of basic blocks to process. */
572 basic_block next;
574 /* True if block already converted. */
575 int visited:1;
577 /* Number of block proceeded before adding basic block to the queue. Used
578 to recognize irregular regions. */
579 int nvisited;
580 } *block_info;
582 /* Similar information for edges. */
583 typedef struct edge_info_def
585 /* In case edge is an loopback edge, the probability edge will be reached
586 in case header is. Estimated number of iterations of the loop can be
587 then computed as 1 / (1 - back_edge_prob). */
588 double back_edge_prob;
589 /* True if the edge is an loopback edge in the natural loop. */
590 int back_edge:1;
591 } *edge_info;
593 #define BLOCK_INFO(B) ((block_info) (B)->aux)
594 #define EDGE_INFO(E) ((edge_info) (E)->aux)
596 /* Helper function for estimate_bb_frequencies.
597 Propagate the frequencies for loops headed by HEAD. */
598 static void
599 propagate_freq (head)
600 basic_block head;
602 basic_block bb = head;
603 basic_block last = bb;
604 edge e;
605 basic_block nextbb;
606 int nvisited = 0;
608 BLOCK_INFO (head)->frequency = 1;
609 for (; bb; bb = nextbb)
611 double cyclic_probability = 0, frequency = 0;
613 nextbb = BLOCK_INFO (bb)->next;
614 BLOCK_INFO (bb)->next = NULL;
616 /* Compute frequency of basic block. */
617 if (bb != head)
619 for (e = bb->pred; e; e = e->pred_next)
620 if (!BLOCK_INFO (e->src)->visited && !EDGE_INFO (e)->back_edge)
621 break;
623 /* We didn't proceeded all predecesors of edge e yet. These may
624 be waiting in the queue or we may hit irreducible region.
626 To avoid infinite looping on irrecudible regions, count number
627 of block proceeded at the time basic block has been queued. In the
628 case number didn't changed, we've hit irreducible region and we
629 forget the backward edge. This can increase time complexity
630 by the number of irreducible blocks, but in same way standard the
631 loop does, so it should not result in noticeable slowodwn.
633 Alternativly we may distinquish backward and cross edges in the
634 DFS tree by preprocesing pass and ignore existence of non-loop
635 backward edges. */
636 if (e && BLOCK_INFO (bb)->nvisited != nvisited)
638 if (!nextbb)
639 nextbb = e->dest;
640 else
641 BLOCK_INFO (last)->next = e->dest;
642 BLOCK_INFO (last)->nvisited = nvisited;
643 last = e->dest;
644 continue;
646 else if (e && rtl_dump_file)
647 fprintf (rtl_dump_file, "Irreducible region hit, ignoring edge to bb %i\n",
648 bb->index);
650 for (e = bb->pred; e; e = e->pred_next)
651 if (EDGE_INFO (e)->back_edge)
652 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
653 else if (BLOCK_INFO (e->src)->visited)
654 frequency += (e->probability
655 * BLOCK_INFO (e->src)->frequency /
656 REG_BR_PROB_BASE);
658 if (cyclic_probability > 1.0 - 1.0 / REG_BR_PROB_BASE)
659 cyclic_probability = 1.0 - 1.0 / REG_BR_PROB_BASE;
661 BLOCK_INFO (bb)->frequency = frequency / (1 - cyclic_probability);
664 BLOCK_INFO (bb)->visited = 1;
666 /* Compute back edge frequencies. */
667 for (e = bb->succ; e; e = e->succ_next)
668 if (e->dest == head)
669 EDGE_INFO (e)->back_edge_prob = (e->probability
670 * BLOCK_INFO (bb)->frequency
671 / REG_BR_PROB_BASE);
673 /* Propagate to succesor blocks. */
674 for (e = bb->succ; e; e = e->succ_next)
675 if (!EDGE_INFO (e)->back_edge
676 && !BLOCK_INFO (e->dest)->visited
677 && !BLOCK_INFO (e->dest)->next && e->dest != last)
679 if (!nextbb)
680 nextbb = e->dest;
681 else
682 BLOCK_INFO (last)->next = e->dest;
683 BLOCK_INFO (last)->nvisited = nvisited;
684 last = e->dest;
686 nvisited ++;
690 /* Estimate probabilities of the loopback edges in loops at same nest level. */
691 static void
692 estimate_loops_at_level (first_loop)
693 struct loop *first_loop;
695 struct loop *l, *loop = first_loop;
697 for (loop = first_loop; loop; loop = loop->next)
699 int n;
700 edge e;
702 estimate_loops_at_level (loop->inner);
704 /* find current loop back edge and mark it. */
705 for (e = loop->latch->succ; e->dest != loop->header; e = e->succ_next);
707 EDGE_INFO (e)->back_edge = 1;
709 /* In case loop header is shared, ensure that it is the last one sharing
710 same header, so we avoid redundant work. */
711 if (loop->shared)
713 for (l = loop->next; l; l = l->next)
714 if (l->header == loop->header)
715 break;
716 if (l)
717 continue;
720 /* Now merge all nodes of all loops with given header as not visited. */
721 for (l = loop->shared ? first_loop : loop; l != loop->next; l = l->next)
722 if (loop->header == l->header)
723 EXECUTE_IF_SET_IN_SBITMAP (l->nodes, 0, n,
724 BLOCK_INFO (BASIC_BLOCK (n))->visited =
726 propagate_freq (loop->header);
730 /* Convert counts measured by profile driven feedback to frequencies. */
731 static void
732 counts_to_freqs ()
734 HOST_WIDEST_INT count_max = 1;
735 int i;
737 for (i = 0; i < n_basic_blocks; i++)
738 if (BASIC_BLOCK (i)->count > count_max)
739 count_max = BASIC_BLOCK (i)->count;
741 for (i = -2; i < n_basic_blocks; i++)
743 basic_block bb;
744 if (i == -2)
745 bb = ENTRY_BLOCK_PTR;
746 else if (i == -1)
747 bb = EXIT_BLOCK_PTR;
748 else
749 bb = BASIC_BLOCK (i);
750 bb->frequency = ((bb->count * BB_FREQ_MAX + count_max / 2)
751 / count_max);
755 /* Estimate basic blocks frequency by given branch probabilities. */
756 static void
757 estimate_bb_frequencies (loops)
758 struct loops *loops;
760 block_info bi;
761 edge_info ei;
762 int edgenum = 0;
763 int i;
764 double freq_max = 0;
766 if (flag_branch_probabilities)
768 counts_to_freqs ();
769 return;
772 /* Fill in the probability values in flowgraph based on the REG_BR_PROB
773 notes. */
774 for (i = 0; i < n_basic_blocks; i++)
776 rtx last_insn = BLOCK_END (i);
777 int probability;
778 edge fallthru, branch;
780 if (GET_CODE (last_insn) != JUMP_INSN || !any_condjump_p (last_insn)
781 /* Avoid handling of conditionals jump jumping to fallthru edge. */
782 || BASIC_BLOCK (i)->succ->succ_next == NULL)
784 /* We can predict only conditional jumps at the moment.
785 Expect each edge to be equall probable.
786 ?? In future we want to make abnormal edges improbable. */
787 int nedges = 0;
788 edge e;
790 for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
792 nedges++;
793 if (e->probability != 0)
794 break;
796 if (!e)
797 for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
798 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
800 else
802 probability = INTVAL (XEXP (find_reg_note (last_insn,
803 REG_BR_PROB, 0), 0));
804 fallthru = BASIC_BLOCK (i)->succ;
805 if (!fallthru->flags & EDGE_FALLTHRU)
806 fallthru = fallthru->succ_next;
807 branch = BASIC_BLOCK (i)->succ;
808 if (branch->flags & EDGE_FALLTHRU)
809 branch = branch->succ_next;
811 branch->probability = probability;
812 fallthru->probability = REG_BR_PROB_BASE - probability;
815 ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;
817 /* Set up block info for each basic block. */
818 bi = (block_info) xcalloc ((n_basic_blocks + 2), sizeof (*bi));
819 ei = (edge_info) xcalloc ((n_edges), sizeof (*ei));
820 for (i = -2; i < n_basic_blocks; i++)
822 edge e;
823 basic_block bb;
825 if (i == -2)
826 bb = ENTRY_BLOCK_PTR;
827 else if (i == -1)
828 bb = EXIT_BLOCK_PTR;
829 else
830 bb = BASIC_BLOCK (i);
831 bb->aux = bi + i + 2;
832 BLOCK_INFO (bb)->visited = 1;
833 for (e = bb->succ; e; e = e->succ_next)
835 e->aux = ei + edgenum, edgenum++;
836 EDGE_INFO (e)->back_edge_prob = ((double) e->probability
837 / REG_BR_PROB_BASE);
840 /* First compute probabilities locally for each loop from innermost
841 to outermost to examine probabilities for back edges. */
842 estimate_loops_at_level (loops->tree_root);
844 /* Now fake loop around whole function to finalize probabilities. */
845 for (i = 0; i < n_basic_blocks; i++)
846 BLOCK_INFO (BASIC_BLOCK (i))->visited = 0;
847 BLOCK_INFO (ENTRY_BLOCK_PTR)->visited = 0;
848 BLOCK_INFO (EXIT_BLOCK_PTR)->visited = 0;
849 propagate_freq (ENTRY_BLOCK_PTR);
851 for (i = 0; i < n_basic_blocks; i++)
852 if (BLOCK_INFO (BASIC_BLOCK (i))->frequency > freq_max)
853 freq_max = BLOCK_INFO (BASIC_BLOCK (i))->frequency;
854 for (i = -2; i < n_basic_blocks; i++)
856 basic_block bb;
857 if (i == -2)
858 bb = ENTRY_BLOCK_PTR;
859 else if (i == -1)
860 bb = EXIT_BLOCK_PTR;
861 else
862 bb = BASIC_BLOCK (i);
863 bb->frequency = (BLOCK_INFO (bb)->frequency * BB_FREQ_MAX / freq_max
864 + 0.5);
867 free (ei);
868 free (bi);