Merge from trunk: 215733-215743
[official-gcc.git] / gcc-4_9 / gcc / profile.c
blob 7c59c43939faa697660c965c952d3e677dd99098
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990-2014 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Generate basic block profile instrumentation and auxiliary files.
24 Profile generation is optimized, so that not all arcs in the basic
25 block graph need instrumenting. First, the BB graph is closed with
26 one entry (function start), and one exit (function exit). Any
27 ABNORMAL_EDGE cannot be instrumented (because there is no control
28 path to place the code). We close the graph by inserting fake
29 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30 edges that do not go to the exit_block. We ignore such abnormal
31 edges. Naturally these fake edges are never directly traversed,
32 and so *cannot* be directly instrumented. Some other graph
33 massaging is done. To optimize the instrumentation we generate the
34 BB minimal span tree; only edges that are not on the span tree
35 (plus the entry point) need instrumenting. From that information
36 all other edge counts can be deduced. By construction all fake
37 edges must be on the spanning tree. We also attempt to place
38 EDGE_CRITICAL edges on the spanning tree.
40 The auxiliary files generated are <dumpbase>.gcno (at compile time)
41 and <dumpbase>.gcda (at run time). The format is
42 described in full in gcov-io.h. */
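/* For instance, for an if/else diamond

       ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   plus the fake EXIT -> ENTRY edge, one possible spanning tree is
   { EXIT->ENTRY, ENTRY->A, A->B, A->C, B->D }.  Only the two off-tree
   edges C->D and D->EXIT then need run-time counters; every other count
   follows from flow conservation, e.g. count(ENTRY->A) = count(D->EXIT)
   and count(A->B) = count(ENTRY->A) - count(C->D).  */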
44 /* ??? Register allocation should use basic block execution counts to
45 give preference to the most commonly executed blocks. */
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48 then we can use arc counts to help decide which arcs to instrument. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "regs.h"
57 #include "expr.h"
58 #include "function.h"
59 #include "basic-block.h"
60 #include "diagnostic-core.h"
61 #include "coverage.h"
62 #include "value-prof.h"
63 #include "tree.h"
64 #include "tree-ssa-alias.h"
65 #include "internal-fn.h"
66 #include "gimple-expr.h"
67 #include "is-a.h"
68 #include "gimple.h"
69 #include "gimple-iterator.h"
70 #include "tree-cfg.h"
71 #include "cfgloop.h"
72 #include "dumpfile.h"
73 #include "params.h"
74 #include "cgraph.h"
76 #include "profile.h"
78 struct bb_info {
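  /* Nonzero once this block's execution count has been determined.  */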
79 unsigned int count_valid : 1;
81 /* Number of successor and predecessor edges. */
82 gcov_type succ_count;
83 gcov_type pred_count;
86 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
89 /* Counter summary from the last set of coverage counts read. */
91 const struct gcov_ctr_summary *profile_info;
93 /* Counter working set information computed from the current counter
94 summary. Not initialized unless profile_info summary is non-NULL. */
95 static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];
97 /* Collect statistics on the performance of this pass for the entire source
98 file. */
100 static int total_num_blocks;
101 static int total_num_edges;
102 static int total_num_edges_ignored;
103 static int total_num_edges_instrumented;
104 static int total_num_blocks_created;
105 static int total_num_passes;
106 static int total_num_times_called;
107 static int total_hist_br_prob[20];
108 static int total_num_branches;
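/* Copy the NUM_GCOV_WORKING_SETS entries of SET into the cached
   gcov_working_sets array.  */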
110 void add_working_set (gcov_working_set_t *set) {
111 int i = 0;
112 for (; i < NUM_GCOV_WORKING_SETS; i++)
113 gcov_working_sets[i] = set[i];
116 /* Forward declarations. */
117 static void find_spanning_tree (struct edge_list *);
119 /* Add edge instrumentation code to the current function.
121 EL is the function's edge list; only edges that are neither ignored
122 nor on the spanning tree are instrumented. Returns their number. */
124 static unsigned
125 instrument_edges (struct edge_list *el)
127 unsigned num_instr_edges = 0;
128 int num_edges = NUM_EDGES (el);
129 basic_block bb;
131 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
133 edge e;
134 edge_iterator ei;
136 FOR_EACH_EDGE (e, ei, bb->succs)
138 struct edge_info *inf = EDGE_INFO (e);
140 if (!inf->ignore && !inf->on_tree)
142 gcc_assert (!(e->flags & EDGE_ABNORMAL));
143 if (dump_file)
144 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
145 e->src->index, e->dest->index,
146 EDGE_CRITICAL_P (e) ? " (and split)" : "");
147 gimple_gen_edge_profiler (num_instr_edges++, e);
152 total_num_blocks_created += num_edges;
153 if (dump_file)
154 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
155 return num_instr_edges;
158 /* Add code to measure histograms for values in list VALUES. */
159 static void
160 instrument_values (histogram_values values)
162 unsigned i;
164 /* Emit code to generate the histograms before the insns. */
166 for (i = 0; i < values.length (); i++)
168 histogram_value hist = values[i];
169 unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
171 /* See condition in gimple_gen_ic_func_topn_profiler. */
172 if (t == GCOV_COUNTER_ICALL_TOPNV
173 && (DECL_STATIC_CONSTRUCTOR (current_function_decl)
174 || DECL_STATIC_DESTRUCTOR (current_function_decl)
175 || DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (
176 current_function_decl)))
177 continue;
179 if (!coverage_counter_alloc (t, hist->n_counters))
180 continue;
182 switch (hist->type)
184 case HIST_TYPE_INTERVAL:
185 gimple_gen_interval_profiler (hist, t, 0);
186 break;
188 case HIST_TYPE_POW2:
189 gimple_gen_pow2_profiler (hist, t, 0);
190 break;
192 case HIST_TYPE_SINGLE_VALUE:
193 gimple_gen_one_value_profiler (hist, t, 0);
194 break;
196 case HIST_TYPE_CONST_DELTA:
197 gimple_gen_const_delta_profiler (hist, t, 0);
198 break;
200 case HIST_TYPE_INDIR_CALL:
201 case HIST_TYPE_INDIR_CALL_TOPN:
202 gimple_gen_ic_profiler (hist, t, 0);
203 break;
205 case HIST_TYPE_AVERAGE:
206 gimple_gen_average_profiler (hist, t, 0);
207 break;
209 case HIST_TYPE_IOR:
210 gimple_gen_ior_profiler (hist, t, 0);
211 break;
213 case HIST_TYPE_TIME_PROFILE:
215 basic_block bb =
216 split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
217 gimple_stmt_iterator gsi = gsi_start_bb (bb);
219 gimple_gen_time_profiler (t, 0, gsi);
220 break;
223 default:
224 gcc_unreachable ();
230 /* Compute gcov_working_sets from the counter summary in profile_info. */
232 void
233 get_working_sets (void)
235 unsigned ws_ix, pctinc, pct;
236 gcov_working_set_t *ws_info;
238 if (!profile_info)
239 return;
241 compute_working_sets (profile_info, gcov_working_sets);
243 if (dump_file)
245 fprintf (dump_file, "Counter working sets:\n");
246 /* Multiply the percentage by 100 to avoid float. */
247 pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
248 for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
249 ws_ix++, pct += pctinc)
251 if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
252 pct = 9990;
253 ws_info = &gcov_working_sets[ws_ix];
254 /* Print out the percentage using int arithmetic to avoid float. */
255 fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
256 HOST_WIDEST_INT_PRINT_DEC "\n",
257 pct / 100, pct - (pct / 100 * 100),
258 ws_info->num_counters,
259 (HOST_WIDEST_INT)ws_info->min_counter);
264 /* Given the desired percentage of the full profile (sum_all from the
265 summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
266 the corresponding working set information. If an exact match for
267 the percentage isn't found, the closest value is used. */
269 gcov_working_set_t *
270 find_working_set (unsigned pct_times_10)
272 unsigned i;
273 if (!profile_info)
274 return NULL;
275 gcc_assert (pct_times_10 <= 1000);
276 if (pct_times_10 >= 999)
277 return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
278 i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
279 if (!i)
280 return &gcov_working_sets[0];
281 return &gcov_working_sets[i - 1];
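/* For example, if NUM_GCOV_WORKING_SETS is 128, a request for 50.0% of
   sum_all arrives as PCT_TIMES_10 == 500, giving i = 500 * 128 / 1000 == 64
   and returning &gcov_working_sets[63]; a request of 99.9% or more always
   returns the last entry.  */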
284 /* Read the arc execution counts for the current function from the .gcda file.
286 CFG_CHECKSUM and LINENO_CHECKSUM are the precomputed checksums used to match
    the counts against this compilation. */
288 static gcov_type *
289 get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
291 unsigned num_edges = 0;
292 basic_block bb;
293 gcov_type *counts;
295 /* Count the edges to be (possibly) instrumented. */
296 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
298 edge e;
299 edge_iterator ei;
301 FOR_EACH_EDGE (e, ei, bb->succs)
302 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
303 num_edges++;
306 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
307 lineno_checksum, &profile_info);
308 if (!counts)
309 return NULL;
311 get_working_sets ();
313 if (dump_file && profile_info)
314 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
315 profile_info->runs, (unsigned) profile_info->sum_max);
317 return counts;
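/* Return true if some edge in EDGES has a negative count; negative counts
   are tolerated only on fake edges leaving a non-entry block that ends
   with a call (see compute_branch_probabilities).  */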
321 static bool
322 is_edge_inconsistent (vec<edge, va_gc> *edges)
324 edge e;
325 edge_iterator ei;
326 FOR_EACH_EDGE (e, ei, edges)
328 if (!EDGE_INFO (e)->ignore)
330 if (e->count < 0
331 && (!(e->flags & EDGE_FAKE)
332 || e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
333 || !block_ends_with_call_p (e->src)))
335 if (dump_file)
337 fprintf (dump_file,
338 "Edge %i->%i is inconsistent, count"HOST_WIDEST_INT_PRINT_DEC,
339 e->src->index, e->dest->index, e->count);
340 dump_bb (dump_file, e->src, 0, TDF_DETAILS);
341 dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
343 return true;
347 return false;
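/* Clamp every negative edge count in the current function to zero.  */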
350 static void
351 correct_negative_edge_counts (void)
353 basic_block bb;
354 edge e;
355 edge_iterator ei;
357 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
359 FOR_EACH_EDGE (e, ei, bb->succs)
361 if (e->count < 0)
362 e->count = 0;
367 /* Check consistency.
368 Return true if inconsistency is found. */
369 static bool
370 is_inconsistent (void)
372 basic_block bb;
373 bool inconsistent = false;
374 FOR_EACH_BB_FN (bb, cfun)
376 inconsistent |= is_edge_inconsistent (bb->preds);
377 if (!dump_file && inconsistent)
378 return true;
379 inconsistent |= is_edge_inconsistent (bb->succs);
380 if (!dump_file && inconsistent)
381 return true;
382 if (bb->count < 0)
384 if (dump_file)
386 fprintf (dump_file, "BB %i count is negative "
387 HOST_WIDEST_INT_PRINT_DEC,
388 bb->index,
389 bb->count);
390 dump_bb (dump_file, bb, 0, TDF_DETAILS);
392 inconsistent = true;
394 if (bb->count != sum_edge_counts (bb->preds))
396 if (dump_file)
398 fprintf (dump_file, "BB %i count does not match sum of incoming edges "
399 HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
400 bb->index,
401 bb->count,
402 sum_edge_counts (bb->preds));
403 dump_bb (dump_file, bb, 0, TDF_DETAILS);
405 inconsistent = true;
407 if (bb->count != sum_edge_counts (bb->succs) &&
408 ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
409 && block_ends_with_call_p (bb)))
411 if (dump_file)
413 fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
414 HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
415 bb->index,
416 bb->count,
417 sum_edge_counts (bb->succs));
418 dump_bb (dump_file, bb, 0, TDF_DETAILS);
420 inconsistent = true;
422 if (!dump_file && inconsistent)
423 return true;
426 return inconsistent;
429 /* Set each basic block count to the sum of its outgoing edge counts */
430 static void
431 set_bb_counts (void)
433 basic_block bb;
434 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
436 bb->count = sum_edge_counts (bb->succs);
437 gcc_assert (bb->count >= 0);
441 /* Reads profile data and returns total number of edge counts read */
442 static int
443 read_profile_edge_counts (gcov_type *exec_counts)
445 basic_block bb;
446 int num_edges = 0;
447 int exec_counts_pos = 0;
448 /* For each edge not on the spanning tree, set its execution count from
449 the .gcda file. */
450 /* The first count in the .gcda file is the number of times that the function
451 was entered. This is the exec_count for block zero. */
453 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
455 edge e;
456 edge_iterator ei;
458 FOR_EACH_EDGE (e, ei, bb->succs)
459 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
461 num_edges++;
462 if (exec_counts)
464 e->count = exec_counts[exec_counts_pos++];
465 if (e->count > profile_info->sum_max)
467 if (flag_profile_correction)
469 static bool informed = 0;
470 if (dump_enabled_p () && !informed)
471 dump_printf_loc (MSG_NOTE, input_location,
472 "corrupted profile info: edge count"
473 " exceeds maximal count\n");
474 informed = 1;
476 else
477 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
478 bb->index, e->dest->index);
481 else
482 e->count = 0;
484 EDGE_INFO (e)->count_valid = 1;
485 BB_INFO (bb)->succ_count--;
486 BB_INFO (e->dest)->pred_count--;
487 if (dump_file)
489 fprintf (dump_file, "\nRead edge from %i to %i, count:",
490 bb->index, e->dest->index);
491 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
492 (HOST_WIDEST_INT) e->count);
497 return num_edges;
500 #define OVERLAP_BASE 10000
502 /* Compare the static estimated profile to the actual profile, and
503 return the "degree of overlap" measure between them.
505 Degree of overlap is a number between 0 and OVERLAP_BASE. It is
506 the sum of each basic block's minimum relative weights between
507 two profiles. An overlap of OVERLAP_BASE means two profiles are
508 identical. */
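/* For example, with two blocks whose profile counts are 90 and 10 and
   whose static frequencies are 50 and 50, the result is
   MIN (9000, 5000) + MIN (1000, 5000) == 6000, i.e. a 60.00% overlap.  */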
510 static int
511 compute_frequency_overlap (void)
513 gcov_type count_total = 0, freq_total = 0;
514 int overlap = 0;
515 basic_block bb;
517 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
519 count_total += bb->count;
520 freq_total += bb->frequency;
523 if (count_total == 0 || freq_total == 0)
524 return 0;
526 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
527 overlap += MIN (bb->count * OVERLAP_BASE / count_total,
528 bb->frequency * OVERLAP_BASE / freq_total);
530 return overlap;
533 /* Compute the branch probabilities for the various branches.
534 Annotate them accordingly.
536 CFG_CHECKSUM is the precomputed checksum for the CFG. */
538 static void
539 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
541 basic_block bb;
542 int i;
543 int num_edges = 0;
544 int changes;
545 int passes;
546 int hist_br_prob[20];
547 int num_branches;
548 gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
549 int inconsistent = 0;
551 /* Very simple sanity checks so we catch bugs in our profiling code. */
552 if (!profile_info)
553 return;
555 if (profile_info->sum_all < profile_info->sum_max)
557 error ("corrupted profile info: sum_all is smaller than sum_max");
558 exec_counts = NULL;
561 /* Attach extra info block to each bb. */
562 alloc_aux_for_blocks (sizeof (struct bb_info));
563 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
565 edge e;
566 edge_iterator ei;
568 FOR_EACH_EDGE (e, ei, bb->succs)
569 if (!EDGE_INFO (e)->ignore)
570 BB_INFO (bb)->succ_count++;
571 FOR_EACH_EDGE (e, ei, bb->preds)
572 if (!EDGE_INFO (e)->ignore)
573 BB_INFO (bb)->pred_count++;
576 /* Avoid predicting entry on exit nodes. */
577 BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
578 BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
580 num_edges = read_profile_edge_counts (exec_counts);
582 if (dump_file)
583 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
585 /* For every block in the file,
586 - if every exit/entrance edge has a known count, then set the block count
587 - if the block count is known, and every exit/entrance edge but one has
588 a known execution count, then set the count of the remaining edge
590 As edge counts are set, decrement the succ/pred count, but don't delete
591 the edge; that way we can easily tell when all edges are known, or only
592 one edge is unknown. */
594 /* The order that the basic blocks are iterated through is important.
595 Since the code that finds spanning trees starts with block 0, low numbered
596 edges are put on the spanning tree in preference to high numbered edges.
597 Hence, most instrumented edges are at the end. Graph solving works much
598 faster if we propagate numbers from the end to the start.
600 This takes an average of slightly more than 3 passes. */
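/* For instance: once every outgoing edge of a block has a known count,
   the block's own count becomes their sum and count_valid is set; once a
   block's count is known and all but one incoming (or outgoing) edge is
   known, the remaining edge's count is obtained as the difference.  That
   is exactly what the loop below does, in both directions.  */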
602 changes = 1;
603 passes = 0;
604 while (changes)
606 passes++;
607 changes = 0;
608 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
610 struct bb_info *bi = BB_INFO (bb);
611 if (! bi->count_valid)
613 if (bi->succ_count == 0)
615 edge e;
616 edge_iterator ei;
617 gcov_type total = 0;
619 FOR_EACH_EDGE (e, ei, bb->succs)
620 total += e->count;
621 bb->count = total;
622 bi->count_valid = 1;
623 changes = 1;
625 else if (bi->pred_count == 0)
627 edge e;
628 edge_iterator ei;
629 gcov_type total = 0;
631 FOR_EACH_EDGE (e, ei, bb->preds)
632 total += e->count;
633 bb->count = total;
634 bi->count_valid = 1;
635 changes = 1;
638 if (bi->count_valid)
640 if (bi->succ_count == 1)
642 edge e;
643 edge_iterator ei;
644 gcov_type total = 0;
646 /* One of the counts will be invalid, but it is zero,
647 so adding it in also doesn't hurt. */
648 FOR_EACH_EDGE (e, ei, bb->succs)
649 total += e->count;
651 /* Search for the invalid edge, and set its count. */
652 FOR_EACH_EDGE (e, ei, bb->succs)
653 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
654 break;
656 /* Calculate count for remaining edge by conservation. */
657 total = bb->count - total;
659 gcc_assert (e);
660 EDGE_INFO (e)->count_valid = 1;
661 e->count = total;
662 bi->succ_count--;
664 BB_INFO (e->dest)->pred_count--;
665 changes = 1;
667 if (bi->pred_count == 1)
669 edge e;
670 edge_iterator ei;
671 gcov_type total = 0;
673 /* One of the counts will be invalid, but it is zero,
674 so adding it in also doesn't hurt. */
675 FOR_EACH_EDGE (e, ei, bb->preds)
676 total += e->count;
678 /* Search for the invalid edge, and set its count. */
679 FOR_EACH_EDGE (e, ei, bb->preds)
680 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
681 break;
683 /* Calculate count for remaining edge by conservation. */
684 total = bb->count - total + e->count;
686 gcc_assert (e);
687 EDGE_INFO (e)->count_valid = 1;
688 e->count = total;
689 bi->pred_count--;
691 BB_INFO (e->src)->succ_count--;
692 changes = 1;
697 if (dump_file)
699 int overlap = compute_frequency_overlap ();
700 gimple_dump_cfg (dump_file, dump_flags);
701 fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
702 overlap / (OVERLAP_BASE / 100),
703 overlap % (OVERLAP_BASE / 100));
706 total_num_passes += passes;
707 if (dump_file)
708 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
710 /* If the graph has been correctly solved, every block will have a
711 succ and pred count of zero. */
712 FOR_EACH_BB_FN (bb, cfun)
714 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
717 /* Check for inconsistent basic block counts */
718 inconsistent = is_inconsistent ();
720 if (inconsistent)
722 if (flag_profile_correction)
724 /* Inconsistency detected. Make it flow-consistent. */
725 static int informed = 0;
726 if (dump_enabled_p () && informed == 0)
728 informed = 1;
729 dump_printf_loc (MSG_NOTE, input_location,
730 "correcting inconsistent profile data\n");
732 correct_negative_edge_counts ();
733 /* Set bb counts to the sum of the outgoing edge counts */
734 set_bb_counts ();
735 if (dump_file)
736 fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
737 mcf_smooth_cfg ();
739 else
740 error ("corrupted profile info: profile data is not flow-consistent");
743 /* For every edge, calculate its branch probability and record it on
744 the edge itself. */
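/* For instance, with REG_BR_PROB_BASE of 10000, an edge taken 3000 times
   out of a block executed 4000 times gets probability
   GCOV_COMPUTE_SCALE (3000, 4000) == 7500, and a branch with that
   probability falls into histogram bucket 7500 * 20 / 10000 == 15,
   i.e. the 75-80% range reported in the dump file.  */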
746 for (i = 0; i < 20; i++)
747 hist_br_prob[i] = 0;
748 num_branches = 0;
750 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
752 edge e;
753 edge_iterator ei;
755 if (bb->count < 0)
757 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
758 bb->index, (int)bb->count);
759 bb->count = 0;
761 FOR_EACH_EDGE (e, ei, bb->succs)
763 /* A function may return twice in case the called function is
764 setjmp or calls fork, but we can't represent this by an extra
765 edge from the entry, since an extra edge from the exit is
766 already present. We get a negative frequency from the entry
767 point. */
768 if ((e->count < 0
769 && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
770 || (e->count > bb->count
771 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
773 if (block_ends_with_call_p (bb))
774 e->count = e->count < 0 ? 0 : bb->count;
776 if (e->count < 0 || e->count > bb->count)
778 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
779 e->src->index, e->dest->index,
780 (int)e->count);
781 e->count = bb->count / 2;
784 if (bb->count)
786 FOR_EACH_EDGE (e, ei, bb->succs)
787 e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
788 if (bb->index >= NUM_FIXED_BLOCKS
789 && block_ends_with_condjump_p (bb)
790 && EDGE_COUNT (bb->succs) >= 2)
792 int prob;
793 edge e;
794 int index;
796 /* Find the branch edge. It is possible that we do have fake
797 edges here. */
798 FOR_EACH_EDGE (e, ei, bb->succs)
799 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
800 break;
802 prob = e->probability;
803 index = prob * 20 / REG_BR_PROB_BASE;
805 if (index == 20)
806 index = 19;
807 hist_br_prob[index]++;
809 num_branches++;
812 /* As a last resort, distribute the probabilities evenly.
813 Use the simple heuristic that if there are normal edges,
814 all abnormal edges get a probability of 0; otherwise the
815 probability is distributed over the abnormal edges (this is
816 the case of noreturn calls). */
817 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
819 int total = 0;
821 FOR_EACH_EDGE (e, ei, bb->succs)
822 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
823 total ++;
824 if (total)
826 FOR_EACH_EDGE (e, ei, bb->succs)
827 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
828 e->probability = REG_BR_PROB_BASE / total;
829 else
830 e->probability = 0;
832 else
834 total += EDGE_COUNT (bb->succs);
835 FOR_EACH_EDGE (e, ei, bb->succs)
836 e->probability = REG_BR_PROB_BASE / total;
838 if (bb->index >= NUM_FIXED_BLOCKS
839 && block_ends_with_condjump_p (bb)
840 && EDGE_COUNT (bb->succs) >= 2)
841 num_branches++;
844 counts_to_freqs ();
845 profile_status_for_fn (cfun) = PROFILE_READ;
846 compute_function_frequency ();
848 if (dump_file)
850 fprintf (dump_file, "%d branches\n", num_branches);
851 if (num_branches)
852 for (i = 0; i < 10; i++)
853 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
854 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
855 5 * i, 5 * i + 5);
857 total_num_branches += num_branches;
858 for (i = 0; i < 20; i++)
859 total_hist_br_prob[i] += hist_br_prob[i];
861 fputc ('\n', dump_file);
862 fputc ('\n', dump_file);
865 free_aux_for_blocks ();
868 /* Load the value histograms whose descriptions are stored in the VALUES
869 array from the .gcda file.
871 CFG_CHECKSUM is the precomputed checksum for the CFG. */
873 static void
874 compute_value_histograms (histogram_values values, unsigned cfg_checksum,
875 unsigned lineno_checksum)
877 unsigned i, j, t, any;
878 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
879 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
880 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
881 gcov_type *aact_count;
882 bool warned[GCOV_N_VALUE_COUNTERS];
883 #define DEF_GCOV_COUNTER(COUNTER, NAME, FN_TYPE) NAME,
884 const char *const ctr_names[GCOV_COUNTERS] = {
885 #include "gcov-counter.def"
887 #undef DEF_GCOV_COUNTER
888 struct cgraph_node *node;
890 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
892 n_histogram_counters[t] = 0;
893 warned[t] = 0;
896 for (i = 0; i < values.length (); i++)
898 histogram_value hist = values[i];
899 n_histogram_counters[(int) hist->type] += hist->n_counters;
902 any = 0;
903 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
905 if (!n_histogram_counters[t])
907 histogram_counts[t] = NULL;
908 continue;
911 histogram_counts[t] =
912 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
913 n_histogram_counters[t], cfg_checksum,
914 lineno_checksum, NULL);
915 if (histogram_counts[t])
916 any = 1;
917 act_count[t] = histogram_counts[t];
919 if (!any)
920 return;
922 for (i = 0; i < values.length (); i++)
924 histogram_value hist = values[i];
925 gimple stmt = hist->hvalue.stmt;
927 t = (int) hist->type;
929 aact_count = act_count[t];
930 /* If the counter cannot be found in the .gcda file, skip this
931 histogram and give a warning. */
932 if (aact_count == 0)
934 if (!warned[t])
935 warning (0, "cannot find %s counters in function %s.",
936 ctr_names[COUNTER_FOR_HIST_TYPE(t)],
937 IDENTIFIER_POINTER (
938 DECL_ASSEMBLER_NAME (current_function_decl)));
939 hist->n_counters = 0;
940 warned[t] = true;
941 continue;
944 if (act_count[t])
945 act_count[t] += hist->n_counters;
947 gimple_add_histogram_value (cfun, stmt, hist);
948 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
949 for (j = 0; j < hist->n_counters; j++)
950 if (aact_count)
951 hist->hvalue.counters[j] = aact_count[j];
952 else
953 hist->hvalue.counters[j] = 0;
955 /* The time profiler counter is not related to any statement,
956 so we have to read the counter and set the value on
957 the corresponding call graph node. */
958 if (hist->type == HIST_TYPE_TIME_PROFILE)
960 node = cgraph_get_node (hist->fun->decl);
962 node->tp_first_run = hist->hvalue.counters[0];
964 if (dump_file)
965 fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
969 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
970 free (histogram_counts[t]);
973 /* When passed NULL as FILE_NAME, reset the static state.
974 Otherwise, output the records needed to switch the current location
975 to FILE_NAME and LINE, opening a GCOV_TAG_LINES record for BB at *OFFSET
    if one is not already open. */
976 static void
977 output_location (char const *file_name, int line,
978 gcov_position_t *offset, basic_block bb)
980 static char const *prev_file_name;
981 static int prev_line;
982 bool name_differs, line_differs;
984 if (!file_name)
986 prev_file_name = NULL;
987 prev_line = -1;
988 return;
991 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
992 line_differs = prev_line != line;
994 if (name_differs || line_differs)
996 if (!*offset)
998 *offset = gcov_write_tag (GCOV_TAG_LINES);
999 gcov_write_unsigned (bb->index);
1000 name_differs = line_differs = true;
1003 /* If this is a new source file, then output the
1004 file's name to the .gcno file. */
1005 if (name_differs)
1007 prev_file_name = file_name;
1008 gcov_write_unsigned (0);
1009 gcov_write_string (prev_file_name);
1011 if (line_differs)
1013 gcov_write_unsigned (line);
1014 prev_line = line;
1019 /* Instrument and/or analyze program behavior based on the program's CFG.
1021 This function creates a representation of the control flow graph (of
1022 the function being compiled) that is suitable for the instrumentation
1023 of edges and/or converting measured edge counts to counts on the
1024 complete CFG.
1026 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
1027 the flow graph that are needed to reconstruct the dynamic behavior of the
1028 flow graph. This data is written to the gcno file for gcov.
1030 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
1031 information from the gcda file containing edge count information from
1032 previous executions of the function being compiled. In this case, the
1033 control flow graph is annotated with actual execution counts by
1034 compute_branch_probabilities().
1036 Main entry point of this file. */
1038 void
1039 branch_prob (void)
1041 basic_block bb;
1042 unsigned i;
1043 unsigned num_edges, ignored_edges;
1044 unsigned num_instrumented;
1045 struct edge_list *el;
1046 histogram_values values = histogram_values ();
1047 unsigned cfg_checksum, lineno_checksum;
1049 total_num_times_called++;
1051 flow_call_edges_add (NULL);
1052 add_noreturn_fake_exit_edges ();
1054 /* We can't handle cyclic regions constructed using abnormal edges.
1055 To avoid these we add, for every source of an abnormal edge, a fake
1056 edge to the exit block, and for every destination a fake edge from
1057 the entry block. This keeps the graph acyclic and our calculation
1058 exact for all normal edges except for exit and entrance ones.
1060 We also add a fake exit edge for each call and asm statement in a
1061 basic block, since it may not return. */
1063 FOR_EACH_BB_FN (bb, cfun)
1065 int need_exit_edge = 0, need_entry_edge = 0;
1066 int have_exit_edge = 0, have_entry_edge = 0;
1067 edge e;
1068 edge_iterator ei;
1070 /* Functions returning multiple times are not handled by extra edges.
1071 Instead we simply allow negative counts on edges from exit to the
1072 block past call and corresponding probabilities. We can't go
1073 with the extra edges because that would result in a flowgraph that
1074 needs to have fake edges outside the spanning tree. */
1076 FOR_EACH_EDGE (e, ei, bb->succs)
1078 gimple_stmt_iterator gsi;
1079 gimple last = NULL;
1081 /* It may happen that there are compiler generated statements
1082 without a locus at all. Go through the basic block from the
1083 last to the first statement looking for a locus. */
1084 for (gsi = gsi_last_nondebug_bb (bb);
1085 !gsi_end_p (gsi);
1086 gsi_prev_nondebug (&gsi))
1088 last = gsi_stmt (gsi);
1089 if (gimple_has_location (last))
1090 break;
1093 /* Edge with goto locus might get wrong coverage info unless
1094 it is the only edge out of BB.
1095 Don't do that when the locuses match, so
1096 if (blah) goto something;
1097 is not computed twice. */
1098 if (last
1099 && gimple_has_location (last)
1100 && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
1101 && !single_succ_p (bb)
1102 && (LOCATION_FILE (e->goto_locus)
1103 != LOCATION_FILE (gimple_location (last))
1104 || (LOCATION_LINE (e->goto_locus)
1105 != LOCATION_LINE (gimple_location (last)))))
1107 basic_block new_bb = split_edge (e);
1108 edge ne = single_succ_edge (new_bb);
1109 ne->goto_locus = e->goto_locus;
1111 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1112 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1113 need_exit_edge = 1;
1114 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1115 have_exit_edge = 1;
1117 FOR_EACH_EDGE (e, ei, bb->preds)
1119 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1120 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1121 need_entry_edge = 1;
1122 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1123 have_entry_edge = 1;
1126 if (need_exit_edge && !have_exit_edge)
1128 if (dump_file)
1129 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1130 bb->index);
1131 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1133 if (need_entry_edge && !have_entry_edge)
1135 if (dump_file)
1136 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1137 bb->index);
1138 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1139 /* Avoid bbs that have both fake entry edge and also some
1140 exit edge. One of those edges wouldn't be added to the
1141 spanning tree, but we can't instrument any of them. */
1142 if (have_exit_edge || need_exit_edge)
1144 gimple_stmt_iterator gsi;
1145 gimple first;
1147 gsi = gsi_start_nondebug_after_labels_bb (bb);
1148 gcc_checking_assert (!gsi_end_p (gsi));
1149 first = gsi_stmt (gsi);
1150 /* Don't split the bbs containing __builtin_setjmp_receiver
1151 or ABNORMAL_DISPATCHER calls. These are very
1152 special and don't expect anything to be inserted before
1153 them. */
1154 if (is_gimple_call (first)
1155 && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1156 || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1157 || (gimple_call_internal_p (first)
1158 && (gimple_call_internal_fn (first)
1159 == IFN_ABNORMAL_DISPATCHER))))
1160 continue;
1162 if (dump_file)
1163 fprintf (dump_file, "Splitting bb %i after labels\n",
1164 bb->index);
1165 split_block_after_labels (bb);
1170 el = create_edge_list ();
1171 num_edges = NUM_EDGES (el);
1172 alloc_aux_for_edges (sizeof (struct edge_info));
1174 /* The basic blocks are expected to be numbered sequentially. */
1175 compact_blocks ();
1177 ignored_edges = 0;
1178 for (i = 0 ; i < num_edges ; i++)
1180 edge e = INDEX_EDGE (el, i);
1181 e->count = 0;
1183 /* Mark edges we've replaced by fake edges above as ignored. */
1184 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1185 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1186 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1188 EDGE_INFO (e)->ignore = 1;
1189 ignored_edges++;
1193 /* Create spanning tree from basic block graph, mark each edge that is
1194 on the spanning tree. We insert as many abnormal and critical edges
1195 as possible to minimize number of edge splits necessary. */
1197 find_spanning_tree (el);
1199 /* Fake edges that are not on the tree will not be instrumented, so
1200 mark them ignored. */
1201 for (num_instrumented = i = 0; i < num_edges; i++)
1203 edge e = INDEX_EDGE (el, i);
1204 struct edge_info *inf = EDGE_INFO (e);
1206 if (inf->ignore || inf->on_tree)
1207 /*NOP*/;
1208 else if (e->flags & EDGE_FAKE)
1210 inf->ignore = 1;
1211 ignored_edges++;
1213 else
1214 num_instrumented++;
1217 total_num_blocks += n_basic_blocks_for_fn (cfun);
1218 if (dump_file)
1219 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1221 total_num_edges += num_edges;
1222 if (dump_file)
1223 fprintf (dump_file, "%d edges\n", num_edges);
1225 total_num_edges_ignored += ignored_edges;
1226 if (dump_file)
1227 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1229 total_num_edges_instrumented += num_instrumented;
1230 if (dump_file)
1231 fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1233 /* Compute two different checksums. Note that we want to compute
1234 the checksum in only one place, since it depends on the shape
1235 of the control flow which can change during
1236 various transformations. */
1237 cfg_checksum = coverage_compute_cfg_checksum ();
1238 lineno_checksum = coverage_compute_lineno_checksum ();
1240 /* Write the data from which gcov can reconstruct the basic block
1241 graph and function line numbers (the gcno file). */
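/* In outline: a GCOV_TAG_BLOCKS record with one flag word per basic
   block, one GCOV_TAG_ARCS record per block listing each successor's
   index and its GCOV_ARC_* flags, and GCOV_TAG_LINES records emitted via
   output_location for statements with known locations; gcov-io.h
   documents the exact encoding.  */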
1242 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1244 gcov_position_t offset;
1246 /* Basic block flags */
1247 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1248 for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1249 gcov_write_unsigned (0);
1250 gcov_write_length (offset);
1252 /* Arcs */
1253 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1254 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1256 edge e;
1257 edge_iterator ei;
1259 offset = gcov_write_tag (GCOV_TAG_ARCS);
1260 gcov_write_unsigned (bb->index);
1262 FOR_EACH_EDGE (e, ei, bb->succs)
1264 struct edge_info *i = EDGE_INFO (e);
1265 if (!i->ignore)
1267 unsigned flag_bits = 0;
1269 if (i->on_tree)
1270 flag_bits |= GCOV_ARC_ON_TREE;
1271 if (e->flags & EDGE_FAKE)
1272 flag_bits |= GCOV_ARC_FAKE;
1273 if (e->flags & EDGE_FALLTHRU)
1274 flag_bits |= GCOV_ARC_FALLTHROUGH;
1275 /* On trees we don't have fallthru flags, but we can
1276 recompute them from CFG shape. */
1277 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1278 && e->src->next_bb == e->dest)
1279 flag_bits |= GCOV_ARC_FALLTHROUGH;
1281 gcov_write_unsigned (e->dest->index);
1282 gcov_write_unsigned (flag_bits);
1286 gcov_write_length (offset);
1289 /* Line numbers. */
1290 /* Initialize the output. */
1291 output_location (NULL, 0, NULL, NULL);
1293 FOR_EACH_BB_FN (bb, cfun)
1295 gimple_stmt_iterator gsi;
1296 gcov_position_t offset = 0;
1298 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1300 expanded_location curr_location =
1301 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1302 output_location (curr_location.file, curr_location.line,
1303 &offset, bb);
1306 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1308 gimple stmt = gsi_stmt (gsi);
1309 if (gimple_has_location (stmt))
1310 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1311 &offset, bb);
1314 /* Notice GOTO expressions eliminated while constructing the CFG. */
1315 if (single_succ_p (bb)
1316 && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
1317 != UNKNOWN_LOCATION)
1319 expanded_location curr_location
1320 = expand_location (single_succ_edge (bb)->goto_locus);
1321 output_location (curr_location.file, curr_location.line,
1322 &offset, bb);
1325 if (offset)
1327 /* A file of NULL indicates the end of run. */
1328 gcov_write_unsigned (0);
1329 gcov_write_string (NULL);
1330 gcov_write_length (offset);
1335 if (flag_profile_values)
1336 gimple_find_values_to_profile (&values);
1338 if (flag_branch_probabilities)
1340 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1341 if (flag_profile_values)
1342 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1345 remove_fake_edges ();
1347 /* For each edge not on the spanning tree, add counting code. */
1348 if (profile_arc_flag
1349 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1351 unsigned n_instrumented;
1353 gimple_init_edge_profiler ();
1355 n_instrumented = instrument_edges (el);
1357 gcc_assert (n_instrumented == num_instrumented);
1359 if (flag_profile_values)
1360 instrument_values (values);
1362 /* Commit changes done by instrumentation. */
1363 gsi_commit_edge_inserts ();
1365 if (flag_profile_generate_sampling
1366 || PARAM_VALUE (PARAM_COVERAGE_EXEC_ONCE))
1367 add_sampling_to_edge_counters ();
1370 free_aux_for_edges ();
1372 values.release ();
1373 free_edge_list (el);
1374 coverage_end_function (lineno_checksum, cfg_checksum);
1377 /* Union find algorithm implementation for the basic blocks using
1378 aux fields. */
1380 static basic_block
1381 find_group (basic_block bb)
1383 basic_block group = bb, bb1;
1385 while ((basic_block) group->aux != group)
1386 group = (basic_block) group->aux;
1388 /* Compress path. */
1389 while ((basic_block) bb->aux != group)
1391 bb1 = (basic_block) bb->aux;
1392 bb->aux = (void *) group;
1393 bb = bb1;
1395 return group;
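/* Merge the union-find groups containing BB1 and BB2; the two blocks are
   expected to be in different groups.  */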
1398 static void
1399 union_groups (basic_block bb1, basic_block bb2)
1401 basic_block bb1g = find_group (bb1);
1402 basic_block bb2g = find_group (bb2);
1404 /* ??? I don't have a place for the rank field. OK, let's go without it;
1405 this code is unlikely to be a performance problem anyway. */
1406 gcc_assert (bb1g != bb2g);
1408 bb1g->aux = bb2g;
1411 /* This function searches all of the edges in the program flow graph, and puts
1412 as many bad edges as possible onto the spanning tree. Bad edges include
1413 abnormal edges, which can't be instrumented at the moment. Since it is
1414 possible for fake edges to form a cycle, we will have to develop some
1415 better way in the future. Also put critical edges on the tree, since they
1416 are more expensive to instrument. */
1418 static void
1419 find_spanning_tree (struct edge_list *el)
1421 int i;
1422 int num_edges = NUM_EDGES (el);
1423 basic_block bb;
1425 /* We use the aux field for the standard union-find algorithm. */
1426 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1427 bb->aux = bb;
1429 /* Add the fake exit-to-entry edge, which we can't instrument. */
1430 union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1432 /* First add all abnormal edges to the tree unless they form a cycle. Also
1433 add all edges to the exit block to avoid inserting profiling code after
1434 the function's return value has been set. */
1435 for (i = 0; i < num_edges; i++)
1437 edge e = INDEX_EDGE (el, i);
1438 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1439 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1440 && !EDGE_INFO (e)->ignore
1441 && (find_group (e->src) != find_group (e->dest)))
1443 if (dump_file)
1444 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1445 e->src->index, e->dest->index);
1446 EDGE_INFO (e)->on_tree = 1;
1447 union_groups (e->src, e->dest);
1451 /* Now insert all critical edges to the tree unless they form a cycle. */
1452 for (i = 0; i < num_edges; i++)
1454 edge e = INDEX_EDGE (el, i);
1455 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1456 && find_group (e->src) != find_group (e->dest))
1458 if (dump_file)
1459 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1460 e->src->index, e->dest->index);
1461 EDGE_INFO (e)->on_tree = 1;
1462 union_groups (e->src, e->dest);
1466 /* And now the rest. */
1467 for (i = 0; i < num_edges; i++)
1469 edge e = INDEX_EDGE (el, i);
1470 if (!EDGE_INFO (e)->ignore
1471 && find_group (e->src) != find_group (e->dest))
1473 if (dump_file)
1474 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1475 e->src->index, e->dest->index);
1476 EDGE_INFO (e)->on_tree = 1;
1477 union_groups (e->src, e->dest);
1481 clear_aux_for_blocks ();
1484 /* Perform file-level initialization for branch-prob processing. */
1486 void
1487 init_branch_prob (void)
1489 int i;
1491 total_num_blocks = 0;
1492 total_num_edges = 0;
1493 total_num_edges_ignored = 0;
1494 total_num_edges_instrumented = 0;
1495 total_num_blocks_created = 0;
1496 total_num_passes = 0;
1497 total_num_times_called = 0;
1498 total_num_branches = 0;
1499 for (i = 0; i < 20; i++)
1500 total_hist_br_prob[i] = 0;
1503 /* Performs file-level cleanup after branch-prob processing
1504 is completed. */
1506 void
1507 end_branch_prob (void)
1509 if (dump_file)
1511 fprintf (dump_file, "\n");
1512 fprintf (dump_file, "Total number of blocks: %d\n",
1513 total_num_blocks);
1514 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1515 fprintf (dump_file, "Total number of ignored edges: %d\n",
1516 total_num_edges_ignored);
1517 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1518 total_num_edges_instrumented);
1519 fprintf (dump_file, "Total number of blocks created: %d\n",
1520 total_num_blocks_created);
1521 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1522 total_num_passes);
1523 if (total_num_times_called != 0)
1524 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1525 (total_num_passes + (total_num_times_called >> 1))
1526 / total_num_times_called);
1527 fprintf (dump_file, "Total number of branches: %d\n",
1528 total_num_branches);
1529 if (total_num_branches)
1531 int i;
1533 for (i = 0; i < 10; i++)
1534 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1535 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1536 / total_num_branches, 5*i, 5*i+5);