/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990-2014 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal span tree, only edges that are not on the span tree
   (plus the entry point) need instrumenting.  From that information
   all other edge counts can be deduced.  By construction all fake
   edges must be on the spanning tree.  We also attempt to place
   EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
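/* Illustrative sketch (not part of the original file): why only the
   non-tree edges need counters.  For a CFG with V blocks and E edges, a
   spanning tree has V - 1 edges, so E - V + 1 counters suffice; every
   tree-edge count follows from flow conservation (the counts entering a
   block equal the counts leaving it).  The standalone toy program below,
   with hypothetical names, recovers the uninstrumented edges of a diamond
   CFG from the two measured ones.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Diamond: A -> B -> D and A -> C -> D.  Suppose the edges A->B and
     A->C were off the spanning tree and therefore instrumented, while
     B->D and C->D are tree edges with no counters.  */
  long count_a_to_b = 7;
  long count_a_to_c = 3;

  /* Block A executes once per traversal of one of its outgoing edges.  */
  long count_a = count_a_to_b + count_a_to_c;

  /* B and C each have a single in-edge and a single out-edge, so the
     tree edges B->D and C->D inherit those counts by conservation.  */
  long count_b_to_d = count_a_to_b;
  long count_c_to_d = count_a_to_c;
  long count_d = count_b_to_d + count_c_to_d;

  printf ("A=%ld D=%ld B->D=%ld C->D=%ld\n",
          count_a, count_d, count_b_to_d, count_c_to_d);
  return 0;
}
#endif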
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "cfgloop.h"
#include "dumpfile.h"
#include "cgraph.h"

#include "profile.h"
struct bb_profile_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b)  ((struct bb_profile_info *) (b)->aux)
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Counter working set information computed from the current counter
   summary.  Not initialized unless profile_info summary is non-NULL.  */
static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_branches;
/* Helper function to update gcov_working_sets.  */

void add_working_set (gcov_working_set_t *set) {
  int i = 0;
  for (; i < NUM_GCOV_WORKING_SETS; i++)
    gcov_working_sets[i] = set[i];
}

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
/* Add edge instrumentation code to the entire insn chain.

   EL is the function's edge list; a counter is added for every edge
   that is neither ignored nor on the spanning tree.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_profile_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              gimple_gen_edge_profiler (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
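/* Illustrative sketch (not part of the original file): conceptually, each
   instrumented edge owns one slot in a per-function counter array, and the
   code emitted above amounts to incrementing that slot whenever the edge is
   traversed (critical edges are split first so there is a block to hold the
   increment).  The names below are hypothetical and are not the real
   libgcov interface.  */
#if 0
#include <stdio.h>
#include <stdint.h>

static int64_t edge_counters[16];   /* one slot per instrumented edge */

static void
bump_edge_counter (unsigned id)     /* what the emitted code boils down to */
{
  edge_counters[id]++;
}

int
main (void)
{
  for (int i = 0; i < 3; i++)
    bump_edge_counter (0);          /* edge 0 traversed three times */
  bump_edge_counter (1);
  printf ("%lld %lld\n", (long long) edge_counters[0],
          (long long) edge_counters[1]);
  return 0;
}
#endif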
/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          gimple_gen_interval_profiler (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          gimple_gen_pow2_profiler (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          gimple_gen_one_value_profiler (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          gimple_gen_const_delta_profiler (hist, t, 0);
          break;

        case HIST_TYPE_INDIR_CALL:
        case HIST_TYPE_INDIR_CALL_TOPN:
          gimple_gen_ic_profiler (hist, t, 0);
          break;

        case HIST_TYPE_AVERAGE:
          gimple_gen_average_profiler (hist, t, 0);
          break;

        case HIST_TYPE_IOR:
          gimple_gen_ior_profiler (hist, t, 0);
          break;

        case HIST_TYPE_TIME_PROFILE:
          {
            basic_block bb =
              split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
            gimple_stmt_iterator gsi = gsi_start_bb (bb);

            gimple_gen_time_profiler (t, 0, gsi);
            break;
          }

        default:
          gcc_unreachable ();
        }
    }
}
/* Fill the working set information into the profile_info structure.  */

void
get_working_sets (void)
{
  unsigned ws_ix, pctinc, pct;
  gcov_working_set_t *ws_info;

  if (!profile_info)
    return;

  compute_working_sets (profile_info, gcov_working_sets);

  if (dump_file)
    {
      fprintf (dump_file, "Counter working sets:\n");
      /* Multiply the percentage by 100 to avoid float.  */
      pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
      for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
           ws_ix++, pct += pctinc)
        {
          if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
            pct = 9990;
          ws_info = &gcov_working_sets[ws_ix];
          /* Print out the percentage using int arithmetic to avoid float.  */
          fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
                   "%"PRId64 "\n",
                   pct / 100, pct - (pct / 100 * 100),
                   ws_info->num_counters,
                   (int64_t)ws_info->min_counter);
        }
    }
}
/* Given the desired percentage of the full profile (sum_all from the
   summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
   the corresponding working set information.  If an exact match for
   the percentage isn't found, the closest value is used.  */

gcov_working_set_t *
find_working_set (unsigned pct_times_10)
{
  unsigned i;
  if (!profile_info)
    return NULL;
  gcc_assert (pct_times_10 <= 1000);
  if (pct_times_10 >= 999)
    return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
  i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
  if (!i)
    return &gcov_working_sets[0];
  return &gcov_working_sets[i - 1];
}
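/* Illustrative sketch (not part of the original file): how a requested
   percentage maps onto one of the NUM_GCOV_WORKING_SETS buckets.  Asking
   for 99.9% or more returns the last bucket; with, say, 128 buckets (used
   below only as a stand-in value), 50.0% (pct_times_10 == 500) lands on
   bucket 500 * 128 / 1000 - 1 == 63.  The standalone code replays that
   index arithmetic.  */
#if 0
#include <stdio.h>

#define N_SETS 128   /* stand-in for NUM_GCOV_WORKING_SETS */

static unsigned
working_set_index (unsigned pct_times_10)
{
  unsigned i;
  if (pct_times_10 >= 999)
    return N_SETS - 1;
  i = pct_times_10 * N_SETS / 1000;
  return i ? i - 1 : 0;
}

int
main (void)
{
  printf ("50.0%% -> %u\n", working_set_index (500));   /* 63 */
  printf ("99.9%% -> %u\n", working_set_index (999));   /* 127 */
  printf (" 0.3%% -> %u\n", working_set_index (3));     /* 0 */
  return 0;
}
#endif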
/* Computes hybrid profile for all matching entries in da_file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static gcov_type *
get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
                                lineno_checksum, &profile_info);
  if (!counts)
    return NULL;

  get_working_sets ();

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
static bool
is_edge_inconsistent (vec<edge, va_gc> *edges)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          if (e->count < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
              if (dump_file)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count%"PRId64,
                           e->src->index, e->dest->index, e->count);
                  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
                  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
                }
              return true;
            }
        }
    }
  return false;
}
static void
correct_negative_edge_counts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->count < 0)
            e->count = 0;
        }
    }
}
/* Check consistency.
   Return true if inconsistency is found.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;
  FOR_EACH_BB_FN (bb, cfun)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
        return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
        return true;
      if (bb->count < 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       "%"PRId64,
                       bb->index,
                       bb->count);
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->preds))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       "%"PRId64" should be %"PRId64,
                       bb->index,
                       bb->count,
                       sum_edge_counts (bb->preds));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
             && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       "%"PRId64" should be %"PRId64,
                       bb->index,
                       bb->count,
                       sum_edge_counts (bb->succs));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      if (!dump_file && inconsistent)
        return true;
    }

  return inconsistent;
}
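/* Illustrative sketch (not part of the original file): "flow-consistent"
   means every block's count equals both the sum of its incoming edge counts
   and the sum of its outgoing edge counts, with the documented exceptions
   for fake edges after calls.  The tiny standalone check below applies that
   rule to one hypothetical block.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long bb_count = 10;
  long preds[] = { 6, 4 };
  long succs[] = { 7, 2 };     /* inconsistent: sums to 9, not 10 */
  long in = preds[0] + preds[1], out = succs[0] + succs[1];

  if (bb_count != in || bb_count != out)
    printf ("inconsistent: count=%ld in=%ld out=%ld\n", bb_count, in, out);
  return 0;
}
#endif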
/* Set each basic block count to the sum of its outgoing edge counts.  */
static void
set_bb_counts (void)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
    }
}
/* Reads profile data and returns total number of edge counts read.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    if (flag_profile_correction)
                      {
                        static bool informed = 0;
                        if (dump_enabled_p () && !informed)
                          dump_printf_loc (MSG_NOTE, input_location,
                                           "corrupted profile info: edge count"
                                           " exceeds maximal count\n");
                        informed = 1;
                      }
                    else
                      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                             bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, "%"PRId64,
                         (int64_t) e->count);
              }
          }
    }

  return num_edges;
}
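/* Illustrative sketch (not part of the original file): the .gcda counter
   stream carries no edge identifiers, so correctness relies on walking the
   blocks and their successor edges here in exactly the same order as at
   instrumentation time, consuming one counter per non-tree, non-ignored
   edge.  The fragment below mimics that shared-traversal-order idea with
   hypothetical plain arrays.  */
#if 0
#include <stdio.h>

struct toy_edge { int instrumented; long count; };

int
main (void)
{
  long stream[] = { 11, 4 };            /* counters as read from disk */
  struct toy_edge edges[] = { { 1, 0 }, { 0, 0 }, { 1, 0 } };
  unsigned pos = 0;

  /* Same fixed order as when the counters were emitted.  */
  for (unsigned i = 0; i < sizeof edges / sizeof edges[0]; i++)
    if (edges[i].instrumented)
      edges[i].count = stream[pos++];

  printf ("edge0=%ld edge2=%ld\n", edges[0].count, edges[2].count);
  return 0;
}
#endif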
#define OVERLAP_BASE 10000

/* Compare the static estimated profile to the actual profile, and
   return the "degree of overlap" measure between them.

   Degree of overlap is a number between 0 and OVERLAP_BASE.  It is
   the sum of each basic block's minimum relative weights between
   two profiles.  An overlap of OVERLAP_BASE means two profiles are
   identical.  */

static int
compute_frequency_overlap (void)
{
  gcov_type count_total = 0, freq_total = 0;
  int overlap = 0;
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      count_total += bb->count;
      freq_total += bb->frequency;
    }

  if (count_total == 0 || freq_total == 0)
    return 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
                    bb->frequency * OVERLAP_BASE / freq_total);

  return overlap;
}
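/* Illustrative sketch (not part of the original file): the overlap of two
   profiles is the sum over blocks of min(count_i/count_total,
   freq_i/freq_total), scaled by OVERLAP_BASE, so identical distributions
   score 10000 and disjoint ones score 0.  The standalone example below
   computes it for two toy weight vectors.  */
#if 0
#include <stdio.h>

#define BASE 10000   /* stand-in for OVERLAP_BASE */

int
main (void)
{
  long counts[] = { 80, 20, 0 };   /* measured profile */
  long freqs[]  = { 50, 25, 25 };  /* static estimate */
  long ct = 0, ft = 0, overlap = 0;

  for (int i = 0; i < 3; i++)
    {
      ct += counts[i];
      ft += freqs[i];
    }
  for (int i = 0; i < 3; i++)
    {
      long a = counts[i] * BASE / ct;
      long b = freqs[i] * BASE / ft;
      overlap += a < b ? a : b;
    }
  printf ("overlap = %ld.%02ld%%\n", overlap / 100, overlap % 100);  /* 70.00% */
  return 0;
}
#endif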
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_branches;
  gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (!profile_info)
    return;

  if (profile_info->sum_all < profile_info->sum_max)
    {
      error ("corrupted profile info: sum_all is smaller than sum_max");
      exec_counts = NULL;
    }

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_profile_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */

  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
        {
          struct bb_profile_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    {
      int overlap = compute_frequency_overlap ();
      gimple_dump_cfg (dump_file, dump_flags);
      fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
               overlap / (OVERLAP_BASE / 100),
               overlap % (OVERLAP_BASE / 100));
    }

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }

  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
        {
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
          if (dump_enabled_p () && informed == 0)
            {
              informed = 1;
              dump_printf_loc (MSG_NOTE, input_location,
                               "correcting inconsistent profile data\n");
            }
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
          set_bb_counts ();
          if (dump_file)
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
          mcf_smooth_cfg ();
        }
      else
        error ("corrupted profile info: profile data is not flow-consistent");
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int)e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              num_branches++;
            }
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total ++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++;
        }
    }
  counts_to_freqs ();
  profile_status_for_fn (cfun) = PROFILE_READ;
  compute_function_frequency ();

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
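/* Illustrative sketch (not part of the original file): the solver above
   repeatedly applies two local rules until nothing changes: (1) a block
   whose incoming or outgoing edges are all known gets their sum as its
   count, and (2) a block with a known count and exactly one unknown
   incident edge gets that edge by subtraction.  The toy chain below, with
   hypothetical arrays, resolves a path A->B->C->D from one measured edge;
   each block here has at most one edge per side, so the sums and
   subtractions are trivial.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Path A->B->C->D; only edge C->D (index 2) was instrumented.  */
  long edge_count[3] = { -1, -1, 5 };   /* -1 means "unknown" */
  long bb_count[4]   = { -1, -1, -1, -1 };
  int changed = 1;

  while (changed)
    {
      changed = 0;
      for (int b = 3; b >= 0; b--)      /* back to front, as in the pass */
        {
          /* Rule 1: an incident edge is known -> block count known.  */
          if (bb_count[b] < 0 && b < 3 && edge_count[b] >= 0)
            { bb_count[b] = edge_count[b]; changed = 1; }
          if (bb_count[b] < 0 && b > 0 && edge_count[b - 1] >= 0)
            { bb_count[b] = edge_count[b - 1]; changed = 1; }
          /* Rule 2: block known, single unknown edge -> solve the edge.  */
          if (bb_count[b] >= 0 && b < 3 && edge_count[b] < 0)
            { edge_count[b] = bb_count[b]; changed = 1; }
          if (bb_count[b] >= 0 && b > 0 && edge_count[b - 1] < 0)
            { edge_count[b - 1] = bb_count[b]; changed = 1; }
        }
    }
  printf ("A=%ld B=%ld C=%ld D=%ld\n",
          bb_count[0], bb_count[1], bb_count[2], bb_count[3]);
  return 0;
}
#endif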
/* Load the value histograms whose descriptions are stored in the VALUES
   array from the .gcda file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_value_histograms (histogram_values values, unsigned cfg_checksum,
                          unsigned lineno_checksum)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;
  struct cgraph_node *node;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], cfg_checksum,
                             lineno_checksum, NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];

      if (act_count[t])
        act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        if (aact_count)
          hist->hvalue.counters[j] = aact_count[j];
        else
          hist->hvalue.counters[j] = 0;

      /* Time profiler counter is not related to any statement,
         so that we have to read the counter and set the value to
         the corresponding call graph node.  */
      if (hist->type == HIST_TYPE_TIME_PROFILE)
        {
          node = cgraph_node::get (hist->fun->decl);
          node->tp_first_run = hist->hvalue.counters[0];

          if (dump_file)
            fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
        }
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    free (histogram_counts[t]);
}
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (bb->index);
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}
/* Instrument and/or analyze program behavior based on the program CFG.

   This function creates a representation of the control flow graph (of
   the function being compiled) that is suitable for the instrumentation
   of edges and/or converting measured edge counts to counts on the
   complete CFG.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.  This data is written to the gcno file for gcov.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from the gcda file containing edge count information from
   previous executions of the function being compiled.  In this case, the
   control flow graph is annotated with actual execution counts by
   compute_branch_probabilities().

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = histogram_values ();
  unsigned cfg_checksum, lineno_checksum;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB_FN (bb, cfun)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple_stmt_iterator gsi;
          gimple last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_nondebug_bb (bb);
               !gsi_end_p (gsi);
               gsi_prev_nondebug (&gsi))
            {
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last
              && gimple_has_location (last)
              && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
            {
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
          /* Avoid bbs that have both fake entry edge and also some
             exit edge.  One of those edges wouldn't be added to the
             spanning tree, but we can't instrument any of them.  */
          if (have_exit_edge || need_exit_edge)
            {
              gimple_stmt_iterator gsi;
              gimple first;

              gsi = gsi_start_nondebug_after_labels_bb (bb);
              gcc_checking_assert (!gsi_end_p (gsi));
              first = gsi_stmt (gsi);
              /* Don't split the bbs containing __builtin_setjmp_receiver
                 or ABNORMAL_DISPATCHER calls.  These are very
                 special and don't expect anything to be inserted before
                 them.  */
              if (is_gimple_call (first)
                  && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
                      || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
                      || (gimple_call_internal_p (first)
                          && (gimple_call_internal_fn (first)
                              == IFN_ABNORMAL_DISPATCHER))))
                continue;

              if (dump_file)
                fprintf (dump_file, "Splitting bb %i after labels\n",
                         bb->index);
              split_block_after_labels (bb);
            }
        }
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_profile_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_profile_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks_for_fn (cfun);
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  total_num_edges_instrumented += num_instrumented;
  if (dump_file)
    fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);

  /* Compute two different checksums.  Note that we want to compute
     the checksum in only one place, since it depends on the shape
     of the control flow which can change during
     various transformations.  */
  cfg_checksum = coverage_compute_cfg_checksum (cfun);
  lineno_checksum = coverage_compute_lineno_checksum ();

  /* Write the data from which gcov can reconstruct the basic block
     graph and function line numbers (the gcno file).  */
  if (coverage_begin_function (lineno_checksum, cfg_checksum))
    {
      gcov_position_t offset;

      /* Basic block flags */
      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);

      /* Arcs */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (bb->index);

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_profile_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (e->dest->index);
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }

      /* Line numbers.  */
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      FOR_EACH_BB_FN (bb, cfun)
        {
          gimple_stmt_iterator gsi;
          gcov_position_t offset = 0;

          if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }

          /* Notice GOTO expressions eliminated while constructing the CFG.  */
          if (single_succ_p (bb)
              && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
                 != UNKNOWN_LOCATION)
            {
              expanded_location curr_location
                = expand_location (single_succ_edge (bb)->goto_locus);
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

  if (flag_profile_values)
    gimple_find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities (cfg_checksum, lineno_checksum);
      if (flag_profile_values)
        compute_value_histograms (values, cfg_checksum, lineno_checksum);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      gimple_init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  values.release ();
  free_edge_list (el);
  coverage_end_function (lineno_checksum, cfg_checksum);
}
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}
static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}
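/* Illustrative sketch (not part of the original file): the two routines
   above are a textbook union-find over the bb->aux pointers, with path
   compression but no union by rank.  The standalone integer version below
   shows the same structure; this is how the spanning-tree pass detects
   whether adding an edge would close a cycle.  */
#if 0
#include <stdio.h>

static int parent[8];

static int
find (int x)
{
  int root = x, next;
  while (parent[root] != root)
    root = parent[root];
  while (parent[x] != root)       /* path compression */
    {
      next = parent[x];
      parent[x] = root;
      x = next;
    }
  return root;
}

int
main (void)
{
  for (int i = 0; i < 8; i++)
    parent[i] = i;
  parent[find (0)] = find (1);    /* union {0,1}: edge 0->1 joins the tree */
  parent[find (1)] = find (2);    /* union {0,1,2} */
  /* An edge 2->0 would now be rejected: both ends share a root.  */
  printf ("same group: %d\n", find (2) == find (0));
  return 0;
}
#endif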
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to the exit block to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  clear_aux_for_blocks ();
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}