gcc/profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990-2014 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Generate basic block profile instrumentation and auxiliary files.
24 Profile generation is optimized, so that not all arcs in the basic
25 block graph need instrumenting. First, the BB graph is closed with
26 one entry (function start), and one exit (function exit). Any
27 ABNORMAL_EDGE cannot be instrumented (because there is no control
28 path to place the code). We close the graph by inserting fake
29 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30 edges that do not go to the exit_block. We ignore such abnormal
31 edges. Naturally these fake edges are never directly traversed,
32 and so *cannot* be directly instrumented. Some other graph
33 massaging is done. To optimize the instrumentation we generate the
34 BB minimal spanning tree; only edges that are not on the spanning tree
35 (plus the entry point) need instrumenting. From that information
36 all other edge counts can be deduced. By construction all fake
37 edges must be on the spanning tree. We also attempt to place
38 EDGE_CRITICAL edges on the spanning tree.
40 The auxiliary files generated are <dumpbase>.gcno (at compile time)
41 and <dumpbase>.gcda (at run time). The format is
42 described in full in gcov-io.h. */
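/* As an illustration of why the spanning tree saves counters: in a
   diamond-shaped CFG with blocks A, B, C, D and edges A->B, A->C, B->D
   and C->D, a spanning tree contains three of the four edges, so only
   the remaining edge (say B->D) needs a counter, assuming the entry
   count of A is also measured.  The other counts then follow from flow
   conservation: count(A->B) = count(B->D), count(A->C) = count(A) -
   count(A->B), and count(C->D) = count(A->C).  In general, a connected
   graph with B blocks and E edges needs at most E - B + 1 edge
   counters, since any spanning tree already covers B - 1 edges.  */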
44 /* ??? Register allocation should use basic block execution counts to
45 give preference to the most commonly executed blocks. */
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48 then we can use arc counts to help decide which arcs to instrument. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "regs.h"
57 #include "expr.h"
58 #include "hashtab.h"
59 #include "hash-set.h"
60 #include "vec.h"
61 #include "machmode.h"
62 #include "hard-reg-set.h"
63 #include "input.h"
64 #include "function.h"
65 #include "predict.h"
66 #include "dominance.h"
67 #include "cfg.h"
68 #include "cfganal.h"
69 #include "basic-block.h"
70 #include "diagnostic-core.h"
71 #include "coverage.h"
72 #include "value-prof.h"
73 #include "tree.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-iterator.h"
80 #include "tree-cfg.h"
81 #include "cfgloop.h"
82 #include "dumpfile.h"
83 #include "cgraph.h"
85 #include "profile.h"
87 struct bb_profile_info {
88 unsigned int count_valid : 1;
90 /* Number of successor and predecessor edges. */
91 gcov_type succ_count;
92 gcov_type pred_count;
95 #define BB_INFO(b) ((struct bb_profile_info *) (b)->aux)
98 /* Counter summary from the last set of coverage counts read. */
100 const struct gcov_ctr_summary *profile_info;
102 /* Counter working set information computed from the current counter
103 summary. Not initialized unless profile_info summary is non-NULL. */
104 static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];
106 /* Collect statistics on the performance of this pass for the entire source
107 file. */
109 static int total_num_blocks;
110 static int total_num_edges;
111 static int total_num_edges_ignored;
112 static int total_num_edges_instrumented;
113 static int total_num_blocks_created;
114 static int total_num_passes;
115 static int total_num_times_called;
116 static int total_hist_br_prob[20];
117 static int total_num_branches;
119 /* Helper function to update gcov_working_sets. */
121 void add_working_set (gcov_working_set_t *set) {
122 int i = 0;
123 for (; i < NUM_GCOV_WORKING_SETS; i++)
124 gcov_working_sets[i] = set[i];
127 /* Forward declarations. */
128 static void find_spanning_tree (struct edge_list *);
130 /* Add edge instrumentation code to the entire insn chain.
132 EL is the edge list of the function being instrumented; a counter is
133 added for every edge that is neither ignored nor on the spanning tree. */
135 static unsigned
136 instrument_edges (struct edge_list *el)
138 unsigned num_instr_edges = 0;
139 int num_edges = NUM_EDGES (el);
140 basic_block bb;
142 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
144 edge e;
145 edge_iterator ei;
147 FOR_EACH_EDGE (e, ei, bb->succs)
149 struct edge_profile_info *inf = EDGE_INFO (e);
151 if (!inf->ignore && !inf->on_tree)
153 gcc_assert (!(e->flags & EDGE_ABNORMAL));
154 if (dump_file)
155 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
156 e->src->index, e->dest->index,
157 EDGE_CRITICAL_P (e) ? " (and split)" : "");
158 gimple_gen_edge_profiler (num_instr_edges++, e);
163 total_num_blocks_created += num_edges;
164 if (dump_file)
165 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
166 return num_instr_edges;
169 /* Add code to measure histograms for values in list VALUES. */
170 static void
171 instrument_values (histogram_values values)
173 unsigned i;
175 /* Emit code to generate the histograms before the insns. */
177 for (i = 0; i < values.length (); i++)
179 histogram_value hist = values[i];
180 unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
182 if (!coverage_counter_alloc (t, hist->n_counters))
183 continue;
185 switch (hist->type)
187 case HIST_TYPE_INTERVAL:
188 gimple_gen_interval_profiler (hist, t, 0);
189 break;
191 case HIST_TYPE_POW2:
192 gimple_gen_pow2_profiler (hist, t, 0);
193 break;
195 case HIST_TYPE_SINGLE_VALUE:
196 gimple_gen_one_value_profiler (hist, t, 0);
197 break;
199 case HIST_TYPE_CONST_DELTA:
200 gimple_gen_const_delta_profiler (hist, t, 0);
201 break;
203 case HIST_TYPE_INDIR_CALL:
204 case HIST_TYPE_INDIR_CALL_TOPN:
205 gimple_gen_ic_profiler (hist, t, 0);
206 break;
208 case HIST_TYPE_AVERAGE:
209 gimple_gen_average_profiler (hist, t, 0);
210 break;
212 case HIST_TYPE_IOR:
213 gimple_gen_ior_profiler (hist, t, 0);
214 break;
216 case HIST_TYPE_TIME_PROFILE:
218 basic_block bb =
219 split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
220 gimple_stmt_iterator gsi = gsi_start_bb (bb);
222 gimple_gen_time_profiler (t, 0, gsi);
223 break;
226 default:
227 gcc_unreachable ();
233 /* Compute the working set information from the counter summary in profile_info. */
235 void
236 get_working_sets (void)
238 unsigned ws_ix, pctinc, pct;
239 gcov_working_set_t *ws_info;
241 if (!profile_info)
242 return;
244 compute_working_sets (profile_info, gcov_working_sets);
246 if (dump_file)
248 fprintf (dump_file, "Counter working sets:\n");
249 /* Multiply the percentage by 100 to avoid float. */
250 pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
251 for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
252 ws_ix++, pct += pctinc)
254 if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
255 pct = 9990;
256 ws_info = &gcov_working_sets[ws_ix];
257 /* Print out the percentage using int arithmetic to avoid float. */
258 fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
259 "%"PRId64 "\n",
260 pct / 100, pct - (pct / 100 * 100),
261 ws_info->num_counters,
262 (int64_t)ws_info->min_counter);
267 /* Given the desired percentage of the full profile (sum_all from the
268 summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
269 the corresponding working set information. If an exact match for
270 the percentage isn't found, the closest value is used. */
272 gcov_working_set_t *
273 find_working_set (unsigned pct_times_10)
275 unsigned i;
276 if (!profile_info)
277 return NULL;
278 gcc_assert (pct_times_10 <= 1000);
279 if (pct_times_10 >= 999)
280 return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
281 i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
282 if (!i)
283 return &gcov_working_sets[0];
284 return &gcov_working_sets[i - 1];
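/* For example, assuming NUM_GCOV_WORKING_SETS is 128 (its usual value in
   gcov-io.h), a request for 50.0% (PCT_TIMES_10 == 500) computes
   i = 500 * 128 / 1000 = 64 and returns &gcov_working_sets[63]; a request
   for 0.5% (PCT_TIMES_10 == 5) computes i = 0 and falls back to the first
   entry; and anything at or above 99.9% returns the last entry.  */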
287 /* Computes hybrid profile for all matching entries in da_file.
289 CFG_CHECKSUM is the precomputed checksum for the CFG. */
291 static gcov_type *
292 get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
294 unsigned num_edges = 0;
295 basic_block bb;
296 gcov_type *counts;
298 /* Count the edges to be (possibly) instrumented. */
299 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
301 edge e;
302 edge_iterator ei;
304 FOR_EACH_EDGE (e, ei, bb->succs)
305 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
306 num_edges++;
309 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
310 lineno_checksum, &profile_info);
311 if (!counts)
312 return NULL;
314 get_working_sets ();
316 if (dump_file && profile_info)
317 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
318 profile_info->runs, (unsigned) profile_info->sum_max);
320 return counts;
324 static bool
325 is_edge_inconsistent (vec<edge, va_gc> *edges)
327 edge e;
328 edge_iterator ei;
329 FOR_EACH_EDGE (e, ei, edges)
331 if (!EDGE_INFO (e)->ignore)
333 if (e->count < 0
334 && (!(e->flags & EDGE_FAKE)
335 || !block_ends_with_call_p (e->src)))
337 if (dump_file)
339 fprintf (dump_file,
340 "Edge %i->%i is inconsistent, count%"PRId64,
341 e->src->index, e->dest->index, e->count);
342 dump_bb (dump_file, e->src, 0, TDF_DETAILS);
343 dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
345 return true;
349 return false;
352 static void
353 correct_negative_edge_counts (void)
355 basic_block bb;
356 edge e;
357 edge_iterator ei;
359 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
361 FOR_EACH_EDGE (e, ei, bb->succs)
363 if (e->count < 0)
364 e->count = 0;
369 /* Check consistency.
370 Return true if inconsistency is found. */
371 static bool
372 is_inconsistent (void)
374 basic_block bb;
375 bool inconsistent = false;
376 FOR_EACH_BB_FN (bb, cfun)
378 inconsistent |= is_edge_inconsistent (bb->preds);
379 if (!dump_file && inconsistent)
380 return true;
381 inconsistent |= is_edge_inconsistent (bb->succs);
382 if (!dump_file && inconsistent)
383 return true;
384 if (bb->count < 0)
386 if (dump_file)
388 fprintf (dump_file, "BB %i count is negative "
389 "%"PRId64,
390 bb->index,
391 bb->count);
392 dump_bb (dump_file, bb, 0, TDF_DETAILS);
394 inconsistent = true;
396 if (bb->count != sum_edge_counts (bb->preds))
398 if (dump_file)
400 fprintf (dump_file, "BB %i count does not match sum of incoming edges "
401 "%"PRId64" should be %"PRId64,
402 bb->index,
403 bb->count,
404 sum_edge_counts (bb->preds));
405 dump_bb (dump_file, bb, 0, TDF_DETAILS);
407 inconsistent = true;
409 if (bb->count != sum_edge_counts (bb->succs) &&
410 ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
411 && block_ends_with_call_p (bb)))
413 if (dump_file)
415 fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
416 "%"PRId64" should be %"PRId64,
417 bb->index,
418 bb->count,
419 sum_edge_counts (bb->succs));
420 dump_bb (dump_file, bb, 0, TDF_DETAILS);
422 inconsistent = true;
424 if (!dump_file && inconsistent)
425 return true;
428 return inconsistent;
431 /* Set each basic block count to the sum of its outgoing edge counts */
432 static void
433 set_bb_counts (void)
435 basic_block bb;
436 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
438 bb->count = sum_edge_counts (bb->succs);
439 gcc_assert (bb->count >= 0);
443 /* Reads profile data and returns total number of edge counts read */
444 static int
445 read_profile_edge_counts (gcov_type *exec_counts)
447 basic_block bb;
448 int num_edges = 0;
449 int exec_counts_pos = 0;
450 /* For each edge not on the spanning tree, set its execution count from
451 the .da file. */
452 /* The first count in the .da file is the number of times that the function
453 was entered. This is the exec_count for block zero. */
455 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
457 edge e;
458 edge_iterator ei;
460 FOR_EACH_EDGE (e, ei, bb->succs)
461 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
463 num_edges++;
464 if (exec_counts)
466 e->count = exec_counts[exec_counts_pos++];
467 if (e->count > profile_info->sum_max)
469 if (flag_profile_correction)
471 static bool informed = 0;
472 if (dump_enabled_p () && !informed)
473 dump_printf_loc (MSG_NOTE, input_location,
474 "corrupted profile info: edge count"
475 " exceeds maximal count\n");
476 informed = 1;
478 else
479 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
480 bb->index, e->dest->index);
483 else
484 e->count = 0;
486 EDGE_INFO (e)->count_valid = 1;
487 BB_INFO (bb)->succ_count--;
488 BB_INFO (e->dest)->pred_count--;
489 if (dump_file)
491 fprintf (dump_file, "\nRead edge from %i to %i, count:",
492 bb->index, e->dest->index);
493 fprintf (dump_file, "%"PRId64,
494 (int64_t) e->count);
499 return num_edges;
502 #define OVERLAP_BASE 10000
504 /* Compare the static estimated profile to the actual profile, and
505 return the "degree of overlap" measure between them.
507 Degree of overlap is a number between 0 and OVERLAP_BASE. It is
508 the sum of each basic block's minimum relative weights between
509 two profiles. An overlap of OVERLAP_BASE means the two profiles are
510 identical. */
512 static int
513 compute_frequency_overlap (void)
515 gcov_type count_total = 0, freq_total = 0;
516 int overlap = 0;
517 basic_block bb;
519 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
521 count_total += bb->count;
522 freq_total += bb->frequency;
525 if (count_total == 0 || freq_total == 0)
526 return 0;
528 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
529 overlap += MIN (bb->count * OVERLAP_BASE / count_total,
530 bb->frequency * OVERLAP_BASE / freq_total);
532 return overlap;
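/* Worked example: with three blocks whose counts are 80, 15 and 5
   (count_total == 100) and whose frequencies are 60, 30 and 10
   (freq_total == 100), the per-block minima scaled by OVERLAP_BASE are
   min (8000, 6000) + min (1500, 3000) + min (500, 1000) = 8000 out of
   OVERLAP_BASE, i.e. an 80% overlap between the two profiles.  */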
535 /* Compute the branch probabilities for the various branches.
536 Annotate them accordingly.
538 CFG_CHECKSUM is the precomputed checksum for the CFG. */
540 static void
541 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
543 basic_block bb;
544 int i;
545 int num_edges = 0;
546 int changes;
547 int passes;
548 int hist_br_prob[20];
549 int num_branches;
550 gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
551 int inconsistent = 0;
553 /* Very simple sanity checks so we catch bugs in our profiling code. */
554 if (!profile_info)
555 return;
557 if (profile_info->sum_all < profile_info->sum_max)
559 error ("corrupted profile info: sum_all is smaller than sum_max");
560 exec_counts = NULL;
563 /* Attach extra info block to each bb. */
564 alloc_aux_for_blocks (sizeof (struct bb_profile_info));
565 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
567 edge e;
568 edge_iterator ei;
570 FOR_EACH_EDGE (e, ei, bb->succs)
571 if (!EDGE_INFO (e)->ignore)
572 BB_INFO (bb)->succ_count++;
573 FOR_EACH_EDGE (e, ei, bb->preds)
574 if (!EDGE_INFO (e)->ignore)
575 BB_INFO (bb)->pred_count++;
578 /* Avoid predicting entry on exit nodes. */
579 BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
580 BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
582 num_edges = read_profile_edge_counts (exec_counts);
584 if (dump_file)
585 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
587 /* For every block in the file,
588 - if every exit/entrance edge has a known count, then set the block count
589 - if the block count is known, and every exit/entrance edge but one has
590 a known execution count, then set the count of the remaining edge
592 As edge counts are set, decrement the succ/pred count, but don't delete
593 the edge, that way we can easily tell when all edges are known, or only
594 one edge is unknown. */
596 /* The order that the basic blocks are iterated through is important.
597 Since the code that finds spanning trees starts with block 0, low numbered
598 edges are put on the spanning tree in preference to high numbered edges.
599 Hence, most instrumented edges are at the end. Graph solving works much
600 faster if we propagate numbers from the end to the start.
602 This takes an average of slightly more than 3 passes. */
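/* For instance, if a block's count is already known to be 100 and two of
   its three outgoing edges have been read as 60 and 10, the remaining
   edge must have executed 100 - (60 + 10) = 30 times.  Setting that edge
   also decrements the successor/predecessor counts of the blocks it
   touches, which may in turn make another block solvable on a later
   visit; this is why the counts propagate in a small number of passes.  */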
604 changes = 1;
605 passes = 0;
606 while (changes)
608 passes++;
609 changes = 0;
610 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
612 struct bb_profile_info *bi = BB_INFO (bb);
613 if (! bi->count_valid)
615 if (bi->succ_count == 0)
617 edge e;
618 edge_iterator ei;
619 gcov_type total = 0;
621 FOR_EACH_EDGE (e, ei, bb->succs)
622 total += e->count;
623 bb->count = total;
624 bi->count_valid = 1;
625 changes = 1;
627 else if (bi->pred_count == 0)
629 edge e;
630 edge_iterator ei;
631 gcov_type total = 0;
633 FOR_EACH_EDGE (e, ei, bb->preds)
634 total += e->count;
635 bb->count = total;
636 bi->count_valid = 1;
637 changes = 1;
640 if (bi->count_valid)
642 if (bi->succ_count == 1)
644 edge e;
645 edge_iterator ei;
646 gcov_type total = 0;
648 /* One of the counts will be invalid, but it is zero,
649 so adding it in also doesn't hurt. */
650 FOR_EACH_EDGE (e, ei, bb->succs)
651 total += e->count;
653 /* Search for the invalid edge, and set its count. */
654 FOR_EACH_EDGE (e, ei, bb->succs)
655 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
656 break;
658 /* Calculate count for remaining edge by conservation. */
659 total = bb->count - total;
661 gcc_assert (e);
662 EDGE_INFO (e)->count_valid = 1;
663 e->count = total;
664 bi->succ_count--;
666 BB_INFO (e->dest)->pred_count--;
667 changes = 1;
669 if (bi->pred_count == 1)
671 edge e;
672 edge_iterator ei;
673 gcov_type total = 0;
675 /* One of the counts will be invalid, but it is zero,
676 so adding it in also doesn't hurt. */
677 FOR_EACH_EDGE (e, ei, bb->preds)
678 total += e->count;
680 /* Search for the invalid edge, and set its count. */
681 FOR_EACH_EDGE (e, ei, bb->preds)
682 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
683 break;
685 /* Calculate count for remaining edge by conservation. */
686 total = bb->count - total + e->count;
688 gcc_assert (e);
689 EDGE_INFO (e)->count_valid = 1;
690 e->count = total;
691 bi->pred_count--;
693 BB_INFO (e->src)->succ_count--;
694 changes = 1;
699 if (dump_file)
701 int overlap = compute_frequency_overlap ();
702 gimple_dump_cfg (dump_file, dump_flags);
703 fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
704 overlap / (OVERLAP_BASE / 100),
705 overlap % (OVERLAP_BASE / 100));
708 total_num_passes += passes;
709 if (dump_file)
710 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
712 /* If the graph has been correctly solved, every block will have a
713 succ and pred count of zero. */
714 FOR_EACH_BB_FN (bb, cfun)
716 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
719 /* Check for inconsistent basic block counts */
720 inconsistent = is_inconsistent ();
722 if (inconsistent)
724 if (flag_profile_correction)
726 /* Inconsistency detected. Make it flow-consistent. */
727 static int informed = 0;
728 if (dump_enabled_p () && informed == 0)
730 informed = 1;
731 dump_printf_loc (MSG_NOTE, input_location,
732 "correcting inconsistent profile data\n");
734 correct_negative_edge_counts ();
735 /* Set bb counts to the sum of the outgoing edge counts */
736 set_bb_counts ();
737 if (dump_file)
738 fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
739 mcf_smooth_cfg ();
741 else
742 error ("corrupted profile info: profile data is not flow-consistent");
745 /* For every edge, calculate its branch probability and add a reg_note
746 to the branch insn to indicate this. */
748 for (i = 0; i < 20; i++)
749 hist_br_prob[i] = 0;
750 num_branches = 0;
752 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
754 edge e;
755 edge_iterator ei;
757 if (bb->count < 0)
759 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
760 bb->index, (int)bb->count);
761 bb->count = 0;
763 FOR_EACH_EDGE (e, ei, bb->succs)
765 /* The function may return twice in the case the called function is
766 setjmp or calls fork, but we can't represent this by an extra
767 edge from the entry, since an extra edge from the exit is
768 already present. We get negative frequency from the entry
769 point. */
770 if ((e->count < 0
771 && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
772 || (e->count > bb->count
773 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
775 if (block_ends_with_call_p (bb))
776 e->count = e->count < 0 ? 0 : bb->count;
778 if (e->count < 0 || e->count > bb->count)
780 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
781 e->src->index, e->dest->index,
782 (int)e->count);
783 e->count = bb->count / 2;
786 if (bb->count)
788 FOR_EACH_EDGE (e, ei, bb->succs)
789 e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
790 if (bb->index >= NUM_FIXED_BLOCKS
791 && block_ends_with_condjump_p (bb)
792 && EDGE_COUNT (bb->succs) >= 2)
794 int prob;
795 edge e;
796 int index;
798 /* Find the branch edge. It is possible that we do have fake
799 edges here. */
800 FOR_EACH_EDGE (e, ei, bb->succs)
801 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
802 break;
804 prob = e->probability;
805 index = prob * 20 / REG_BR_PROB_BASE;
807 if (index == 20)
808 index = 19;
809 hist_br_prob[index]++;
811 num_branches++;
814 /* As a last resort, distribute the probabilities evenly.
815 Use the simple heuristic that if there are normal edges,
816 give all abnormal edges a frequency of 0; otherwise distribute the
817 frequency over the abnormal edges (this is the case for noreturn
818 calls). */
819 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
821 int total = 0;
823 FOR_EACH_EDGE (e, ei, bb->succs)
824 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
825 total ++;
826 if (total)
828 FOR_EACH_EDGE (e, ei, bb->succs)
829 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
830 e->probability = REG_BR_PROB_BASE / total;
831 else
832 e->probability = 0;
834 else
836 total += EDGE_COUNT (bb->succs);
837 FOR_EACH_EDGE (e, ei, bb->succs)
838 e->probability = REG_BR_PROB_BASE / total;
840 if (bb->index >= NUM_FIXED_BLOCKS
841 && block_ends_with_condjump_p (bb)
842 && EDGE_COUNT (bb->succs) >= 2)
843 num_branches++;
846 counts_to_freqs ();
847 profile_status_for_fn (cfun) = PROFILE_READ;
848 compute_function_frequency ();
850 if (dump_file)
852 fprintf (dump_file, "%d branches\n", num_branches);
853 if (num_branches)
854 for (i = 0; i < 10; i++)
855 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
856 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
857 5 * i, 5 * i + 5);
859 total_num_branches += num_branches;
860 for (i = 0; i < 20; i++)
861 total_hist_br_prob[i] += hist_br_prob[i];
863 fputc ('\n', dump_file);
864 fputc ('\n', dump_file);
867 free_aux_for_blocks ();
870 /* Load the value histograms whose descriptions are stored in the VALUES array
871 from the .gcda file.
873 CFG_CHECKSUM is the precomputed checksum for the CFG. */
875 static void
876 compute_value_histograms (histogram_values values, unsigned cfg_checksum,
877 unsigned lineno_checksum)
879 unsigned i, j, t, any;
880 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
881 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
882 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
883 gcov_type *aact_count;
884 struct cgraph_node *node;
886 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
887 n_histogram_counters[t] = 0;
889 for (i = 0; i < values.length (); i++)
891 histogram_value hist = values[i];
892 n_histogram_counters[(int) hist->type] += hist->n_counters;
895 any = 0;
896 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
898 if (!n_histogram_counters[t])
900 histogram_counts[t] = NULL;
901 continue;
904 histogram_counts[t] =
905 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
906 n_histogram_counters[t], cfg_checksum,
907 lineno_checksum, NULL);
908 if (histogram_counts[t])
909 any = 1;
910 act_count[t] = histogram_counts[t];
912 if (!any)
913 return;
915 for (i = 0; i < values.length (); i++)
917 histogram_value hist = values[i];
918 gimple stmt = hist->hvalue.stmt;
920 t = (int) hist->type;
922 aact_count = act_count[t];
924 if (act_count[t])
925 act_count[t] += hist->n_counters;
927 gimple_add_histogram_value (cfun, stmt, hist);
928 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
929 for (j = 0; j < hist->n_counters; j++)
930 if (aact_count)
931 hist->hvalue.counters[j] = aact_count[j];
932 else
933 hist->hvalue.counters[j] = 0;
935 /* The time profiler counter is not related to any statement,
936 so we have to read the counter and set the value on
937 the corresponding call graph node. */
938 if (hist->type == HIST_TYPE_TIME_PROFILE)
940 node = cgraph_node::get (hist->fun->decl);
941 node->tp_first_run = hist->hvalue.counters[0];
943 if (dump_file)
944 fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
948 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
949 free (histogram_counts[t]);
952 /* When passed NULL as file_name, initialize.
953 When passed something else, output the necessary records to change
954 the current line to LINE and the current file to FILE_NAME. */
955 static void
956 output_location (char const *file_name, int line,
957 gcov_position_t *offset, basic_block bb)
959 static char const *prev_file_name;
960 static int prev_line;
961 bool name_differs, line_differs;
963 if (!file_name)
965 prev_file_name = NULL;
966 prev_line = -1;
967 return;
970 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
971 line_differs = prev_line != line;
973 if (name_differs || line_differs)
975 if (!*offset)
977 *offset = gcov_write_tag (GCOV_TAG_LINES);
978 gcov_write_unsigned (bb->index);
979 name_differs = line_differs = true;
982 /* If this is a new source file, then output the
983 file's name to the .bb file. */
984 if (name_differs)
986 prev_file_name = file_name;
987 gcov_write_unsigned (0);
988 gcov_write_string (prev_file_name);
990 if (line_differs)
992 gcov_write_unsigned (line);
993 prev_line = line;
998 /* Instrument and/or analyze program behavior based on the program's CFG.
1000 This function creates a representation of the control flow graph (of
1001 the function being compiled) that is suitable for the instrumentation
1002 of edges and/or converting measured edge counts to counts on the
1003 complete CFG.
1005 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
1006 the flow graph that are needed to reconstruct the dynamic behavior of the
1007 flow graph. This data is written to the gcno file for gcov.
1009 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
1010 information from the gcda file containing edge count information from
1011 previous executions of the function being compiled. In this case, the
1012 control flow graph is annotated with actual execution counts by
1013 compute_branch_probabilities().
1015 Main entry point of this file. */
1017 void
1018 branch_prob (void)
1020 basic_block bb;
1021 unsigned i;
1022 unsigned num_edges, ignored_edges;
1023 unsigned num_instrumented;
1024 struct edge_list *el;
1025 histogram_values values = histogram_values ();
1026 unsigned cfg_checksum, lineno_checksum;
1028 total_num_times_called++;
1030 flow_call_edges_add (NULL);
1031 add_noreturn_fake_exit_edges ();
1033 /* We can't handle cyclic regions constructed using abnormal edges.
1034 To avoid these we replace every source of abnormal edge by a fake
1035 edge from entry node and every destination by fake edge to exit.
1036 This keeps graph acyclic and our calculation exact for all normal
1037 edges except for exit and entrance ones.
1039 We also add fake exit edges for each call and asm statement in the
1040 basic block, since it may not return. */
1042 FOR_EACH_BB_FN (bb, cfun)
1044 int need_exit_edge = 0, need_entry_edge = 0;
1045 int have_exit_edge = 0, have_entry_edge = 0;
1046 edge e;
1047 edge_iterator ei;
1049 /* Functions returning multiple times are not handled by extra edges.
1050 Instead we simply allow negative counts on edges from exit to the
1051 block past call and corresponding probabilities. We can't go
1052 with the extra edges because that would result in a flow graph that
1053 needs to have fake edges outside the spanning tree. */
1055 FOR_EACH_EDGE (e, ei, bb->succs)
1057 gimple_stmt_iterator gsi;
1058 gimple last = NULL;
1060 /* It may happen that there are compiler generated statements
1061 without a locus at all. Go through the basic block from the
1062 last to the first statement looking for a locus. */
1063 for (gsi = gsi_last_nondebug_bb (bb);
1064 !gsi_end_p (gsi);
1065 gsi_prev_nondebug (&gsi))
1067 last = gsi_stmt (gsi);
1068 if (gimple_has_location (last))
1069 break;
1072 /* An edge with a goto locus might get wrong coverage info unless
1073 it is the only edge out of BB.
1074 Don't do that when the locations match, so that
1075 if (blah) goto something;
1076 is not computed twice. */
1077 if (last
1078 && gimple_has_location (last)
1079 && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
1080 && !single_succ_p (bb)
1081 && (LOCATION_FILE (e->goto_locus)
1082 != LOCATION_FILE (gimple_location (last))
1083 || (LOCATION_LINE (e->goto_locus)
1084 != LOCATION_LINE (gimple_location (last)))))
1086 basic_block new_bb = split_edge (e);
1087 edge ne = single_succ_edge (new_bb);
1088 ne->goto_locus = e->goto_locus;
1090 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1091 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1092 need_exit_edge = 1;
1093 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1094 have_exit_edge = 1;
1096 FOR_EACH_EDGE (e, ei, bb->preds)
1098 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1099 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1100 need_entry_edge = 1;
1101 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1102 have_entry_edge = 1;
1105 if (need_exit_edge && !have_exit_edge)
1107 if (dump_file)
1108 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1109 bb->index);
1110 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1112 if (need_entry_edge && !have_entry_edge)
1114 if (dump_file)
1115 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1116 bb->index);
1117 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1118 /* Avoid bbs that have both fake entry edge and also some
1119 exit edge. One of those edges wouldn't be added to the
1120 spanning tree, but we can't instrument any of them. */
1121 if (have_exit_edge || need_exit_edge)
1123 gimple_stmt_iterator gsi;
1124 gimple first;
1126 gsi = gsi_start_nondebug_after_labels_bb (bb);
1127 gcc_checking_assert (!gsi_end_p (gsi));
1128 first = gsi_stmt (gsi);
1129 /* Don't split the bbs containing __builtin_setjmp_receiver
1130 or ABNORMAL_DISPATCHER calls. These are very
1131 special and don't expect anything to be inserted before
1132 them. */
1133 if (is_gimple_call (first)
1134 && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1135 || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1136 || (gimple_call_internal_p (first)
1137 && (gimple_call_internal_fn (first)
1138 == IFN_ABNORMAL_DISPATCHER))))
1139 continue;
1141 if (dump_file)
1142 fprintf (dump_file, "Splitting bb %i after labels\n",
1143 bb->index);
1144 split_block_after_labels (bb);
1149 el = create_edge_list ();
1150 num_edges = NUM_EDGES (el);
1151 alloc_aux_for_edges (sizeof (struct edge_profile_info));
1153 /* The basic blocks are expected to be numbered sequentially. */
1154 compact_blocks ();
1156 ignored_edges = 0;
1157 for (i = 0 ; i < num_edges ; i++)
1159 edge e = INDEX_EDGE (el, i);
1160 e->count = 0;
1162 /* Mark edges we've replaced by fake edges above as ignored. */
1163 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1164 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1165 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1167 EDGE_INFO (e)->ignore = 1;
1168 ignored_edges++;
1172 /* Create spanning tree from basic block graph, mark each edge that is
1173 on the spanning tree. We insert as many abnormal and critical edges
1174 as possible to minimize the number of edge splits necessary. */
1176 find_spanning_tree (el);
1178 /* Fake edges that are not on the tree will not be instrumented, so
1179 mark them ignored. */
1180 for (num_instrumented = i = 0; i < num_edges; i++)
1182 edge e = INDEX_EDGE (el, i);
1183 struct edge_profile_info *inf = EDGE_INFO (e);
1185 if (inf->ignore || inf->on_tree)
1186 /*NOP*/;
1187 else if (e->flags & EDGE_FAKE)
1189 inf->ignore = 1;
1190 ignored_edges++;
1192 else
1193 num_instrumented++;
1196 total_num_blocks += n_basic_blocks_for_fn (cfun);
1197 if (dump_file)
1198 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1200 total_num_edges += num_edges;
1201 if (dump_file)
1202 fprintf (dump_file, "%d edges\n", num_edges);
1204 total_num_edges_ignored += ignored_edges;
1205 if (dump_file)
1206 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1208 total_num_edges_instrumented += num_instrumented;
1209 if (dump_file)
1210 fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1212 /* Compute two different checksums. Note that we want to compute
1213 the checksum in only one place, since it depends on the shape
1214 of the control flow, which can change during
1215 various transformations. */
1216 cfg_checksum = coverage_compute_cfg_checksum (cfun);
1217 lineno_checksum = coverage_compute_lineno_checksum ();
1219 /* Write the data from which gcov can reconstruct the basic block
1220 graph and function line numbers (the gcno file). */
1221 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1223 gcov_position_t offset;
1225 /* Basic block flags */
1226 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1227 for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1228 gcov_write_unsigned (0);
1229 gcov_write_length (offset);
1231 /* Arcs */
1232 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1233 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1235 edge e;
1236 edge_iterator ei;
1238 offset = gcov_write_tag (GCOV_TAG_ARCS);
1239 gcov_write_unsigned (bb->index);
1241 FOR_EACH_EDGE (e, ei, bb->succs)
1243 struct edge_profile_info *i = EDGE_INFO (e);
1244 if (!i->ignore)
1246 unsigned flag_bits = 0;
1248 if (i->on_tree)
1249 flag_bits |= GCOV_ARC_ON_TREE;
1250 if (e->flags & EDGE_FAKE)
1251 flag_bits |= GCOV_ARC_FAKE;
1252 if (e->flags & EDGE_FALLTHRU)
1253 flag_bits |= GCOV_ARC_FALLTHROUGH;
1254 /* On trees we don't have fallthru flags, but we can
1255 recompute them from CFG shape. */
1256 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1257 && e->src->next_bb == e->dest)
1258 flag_bits |= GCOV_ARC_FALLTHROUGH;
1260 gcov_write_unsigned (e->dest->index);
1261 gcov_write_unsigned (flag_bits);
1265 gcov_write_length (offset);
1268 /* Line numbers. */
1269 /* Initialize the output. */
1270 output_location (NULL, 0, NULL, NULL);
1272 FOR_EACH_BB_FN (bb, cfun)
1274 gimple_stmt_iterator gsi;
1275 gcov_position_t offset = 0;
1277 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1279 expanded_location curr_location =
1280 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1281 output_location (curr_location.file, curr_location.line,
1282 &offset, bb);
1285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1287 gimple stmt = gsi_stmt (gsi);
1288 if (gimple_has_location (stmt))
1289 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1290 &offset, bb);
1293 /* Notice GOTO expressions eliminated while constructing the CFG. */
1294 if (single_succ_p (bb)
1295 && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
1296 != UNKNOWN_LOCATION)
1298 expanded_location curr_location
1299 = expand_location (single_succ_edge (bb)->goto_locus);
1300 output_location (curr_location.file, curr_location.line,
1301 &offset, bb);
1304 if (offset)
1306 /* A file of NULL indicates the end of run. */
1307 gcov_write_unsigned (0);
1308 gcov_write_string (NULL);
1309 gcov_write_length (offset);
1314 if (flag_profile_values)
1315 gimple_find_values_to_profile (&values);
1317 if (flag_branch_probabilities)
1319 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1320 if (flag_profile_values)
1321 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1324 remove_fake_edges ();
1326 /* For each edge not on the spanning tree, add counting code. */
1327 if (profile_arc_flag
1328 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1330 unsigned n_instrumented;
1332 gimple_init_edge_profiler ();
1334 n_instrumented = instrument_edges (el);
1336 gcc_assert (n_instrumented == num_instrumented);
1338 if (flag_profile_values)
1339 instrument_values (values);
1341 /* Commit changes done by instrumentation. */
1342 gsi_commit_edge_inserts ();
1345 free_aux_for_edges ();
1347 values.release ();
1348 free_edge_list (el);
1349 coverage_end_function (lineno_checksum, cfg_checksum);
1352 /* Union find algorithm implementation for the basic blocks using
1353 aux fields. */
1355 static basic_block
1356 find_group (basic_block bb)
1358 basic_block group = bb, bb1;
1360 while ((basic_block) group->aux != group)
1361 group = (basic_block) group->aux;
1363 /* Compress path. */
1364 while ((basic_block) bb->aux != group)
1366 bb1 = (basic_block) bb->aux;
1367 bb->aux = (void *) group;
1368 bb = bb1;
1370 return group;
1373 static void
1374 union_groups (basic_block bb1, basic_block bb2)
1376 basic_block bb1g = find_group (bb1);
1377 basic_block bb2g = find_group (bb2);
1379 /* ??? I don't have a place for the rank field. OK. Let's go without it;
1380 this code is unlikely to be a performance problem anyway. */
1381 gcc_assert (bb1g != bb2g);
1383 bb1g->aux = bb2g;
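/* A short trace of the two routines above: suppose blocks B1, B2 and B3
   each start with bb->aux pointing at themselves.  union_groups (B1, B2)
   links B1 to B2, and union_groups (B2, B3) then links B2 to B3, so B3
   becomes the representative of all three.  A later find_group (B1) walks
   B1 -> B2 -> B3 and, thanks to path compression, leaves both B1 and B2
   pointing directly at B3, keeping subsequent lookups cheap.  */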
1386 /* This function searches all of the edges in the program flow graph, and puts
1387 as many bad edges as possible onto the spanning tree. Bad edges include
1388 abnormal edges, which can't be instrumented at the moment. Since it is
1389 possible for fake edges to form a cycle, we will have to develop some
1390 better way in the future. Critical edges are also put on the tree, since they
1391 are more expensive to instrument. */
1393 static void
1394 find_spanning_tree (struct edge_list *el)
1396 int i;
1397 int num_edges = NUM_EDGES (el);
1398 basic_block bb;
1400 /* We use aux field for standard union-find algorithm. */
1401 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1402 bb->aux = bb;
1404 /* Add the fake exit-to-entry edge to the tree, since we can't instrument it. */
1405 union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1407 /* First add all abnormal edges to the tree unless they form a cycle. Also
1408 add all edges to the exit block, to avoid inserting profiling code after
1409 the return value of the function has been set. */
1410 for (i = 0; i < num_edges; i++)
1412 edge e = INDEX_EDGE (el, i);
1413 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1414 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1415 && !EDGE_INFO (e)->ignore
1416 && (find_group (e->src) != find_group (e->dest)))
1418 if (dump_file)
1419 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1420 e->src->index, e->dest->index);
1421 EDGE_INFO (e)->on_tree = 1;
1422 union_groups (e->src, e->dest);
1426 /* Now insert all critical edges to the tree unless they form a cycle. */
1427 for (i = 0; i < num_edges; i++)
1429 edge e = INDEX_EDGE (el, i);
1430 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1431 && find_group (e->src) != find_group (e->dest))
1433 if (dump_file)
1434 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1435 e->src->index, e->dest->index);
1436 EDGE_INFO (e)->on_tree = 1;
1437 union_groups (e->src, e->dest);
1441 /* And now the rest. */
1442 for (i = 0; i < num_edges; i++)
1444 edge e = INDEX_EDGE (el, i);
1445 if (!EDGE_INFO (e)->ignore
1446 && find_group (e->src) != find_group (e->dest))
1448 if (dump_file)
1449 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1450 e->src->index, e->dest->index);
1451 EDGE_INFO (e)->on_tree = 1;
1452 union_groups (e->src, e->dest);
1456 clear_aux_for_blocks ();
1459 /* Perform file-level initialization for branch-prob processing. */
1461 void
1462 init_branch_prob (void)
1464 int i;
1466 total_num_blocks = 0;
1467 total_num_edges = 0;
1468 total_num_edges_ignored = 0;
1469 total_num_edges_instrumented = 0;
1470 total_num_blocks_created = 0;
1471 total_num_passes = 0;
1472 total_num_times_called = 0;
1473 total_num_branches = 0;
1474 for (i = 0; i < 20; i++)
1475 total_hist_br_prob[i] = 0;
1478 /* Performs file-level cleanup after branch-prob processing
1479 is completed. */
1481 void
1482 end_branch_prob (void)
1484 if (dump_file)
1486 fprintf (dump_file, "\n");
1487 fprintf (dump_file, "Total number of blocks: %d\n",
1488 total_num_blocks);
1489 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1490 fprintf (dump_file, "Total number of ignored edges: %d\n",
1491 total_num_edges_ignored);
1492 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1493 total_num_edges_instrumented);
1494 fprintf (dump_file, "Total number of blocks created: %d\n",
1495 total_num_blocks_created);
1496 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1497 total_num_passes);
1498 if (total_num_times_called != 0)
1499 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1500 (total_num_passes + (total_num_times_called >> 1))
1501 / total_num_times_called);
1502 fprintf (dump_file, "Total number of branches: %d\n",
1503 total_num_branches);
1504 if (total_num_branches)
1506 int i;
1508 for (i = 0; i < 10; i++)
1509 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1510 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1511 / total_num_branches, 5*i, 5*i+5);