gcc/testsuite/
[official-gcc.git] / gcc / profile.c
blobbcff41105224078b5d2be9b9fa6802f91a13d699
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990-2014 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Generate basic block profile instrumentation and auxiliary files.
24 Profile generation is optimized, so that not all arcs in the basic
25 block graph need instrumenting. First, the BB graph is closed with
26 one entry (function start), and one exit (function exit). Any
27 ABNORMAL_EDGE cannot be instrumented (because there is no control
28 path to place the code). We close the graph by inserting fake
29 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30 edges that do not go to the exit_block. We ignore such abnormal
31 edges. Naturally these fake edges are never directly traversed,
32 and so *cannot* be directly instrumented. Some other graph
33 massaging is done. To optimize the instrumentation we generate the
34 BB minimal span tree, only edges that are not on the span tree
35 (plus the entry point) need instrumenting. From that information
36 all other edge counts can be deduced. By construction all fake
37 edges must be on the spanning tree. We also attempt to place
38 EDGE_CRITICAL edges on the spanning tree.
40 The auxiliary files generated are <dumpbase>.gcno (at compile time)
41 and <dumpbase>.gcda (at run time). The format is
42 described in full in gcov-io.h. */
44 /* ??? Register allocation should use basic block execution counts to
45 give preference to the most commonly executed blocks. */
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48 then we can use arc counts to help decide which arcs to instrument. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "regs.h"
57 #include "expr.h"
58 #include "hashtab.h"
59 #include "hash-set.h"
60 #include "vec.h"
61 #include "machmode.h"
62 #include "hard-reg-set.h"
63 #include "input.h"
64 #include "function.h"
65 #include "basic-block.h"
66 #include "diagnostic-core.h"
67 #include "coverage.h"
68 #include "value-prof.h"
69 #include "tree.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-expr.h"
73 #include "is-a.h"
74 #include "gimple.h"
75 #include "gimple-iterator.h"
76 #include "tree-cfg.h"
77 #include "cfgloop.h"
78 #include "dumpfile.h"
79 #include "cgraph.h"
81 #include "profile.h"
/* Per-basic-block bookkeeping used while solving edge counts; hung off
   bb->aux (see BB_INFO) for the duration of compute_branch_probabilities.  */
struct bb_profile_info {
  /* Nonzero once this block's execution count has been determined.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b) ((struct bb_profile_info *) (b)->aux)


/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Counter working set information computed from the current counter
   summary. Not initialized unless profile_info summary is non-NULL.  */
static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
/* Histogram of branch probabilities, in 5% buckets (see dump output).  */
static int total_hist_br_prob[20];
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
118 /* Add edge instrumentation code to the entire insn chain.
120 F is the first insn of the chain.
121 NUM_BLOCKS is the number of basic blocks found in F. */
123 static unsigned
124 instrument_edges (struct edge_list *el)
126 unsigned num_instr_edges = 0;
127 int num_edges = NUM_EDGES (el);
128 basic_block bb;
130 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
132 edge e;
133 edge_iterator ei;
135 FOR_EACH_EDGE (e, ei, bb->succs)
137 struct edge_profile_info *inf = EDGE_INFO (e);
139 if (!inf->ignore && !inf->on_tree)
141 gcc_assert (!(e->flags & EDGE_ABNORMAL));
142 if (dump_file)
143 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
144 e->src->index, e->dest->index,
145 EDGE_CRITICAL_P (e) ? " (and split)" : "");
146 gimple_gen_edge_profiler (num_instr_edges++, e);
151 total_num_blocks_created += num_edges;
152 if (dump_file)
153 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
154 return num_instr_edges;
/* Add code to measure histograms for values in list VALUES.
   Each histogram_value describes one value-profiling site (interval,
   power-of-two, indirect call, etc.); a matching profiler call is
   emitted for each, after a counter section of the right size has
   been reserved.  */
static void
instrument_values (histogram_values values)
{
  unsigned i;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      /* Counter section is keyed off the histogram type.  */
      unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);

      /* Skip this site if its counters cannot be allocated.  */
      if (!coverage_counter_alloc (t, hist->n_counters))
	continue;

      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  gimple_gen_interval_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_POW2:
	  gimple_gen_pow2_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  gimple_gen_one_value_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_CONST_DELTA:
	  gimple_gen_const_delta_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_INDIR_CALL:
	case HIST_TYPE_INDIR_CALL_TOPN:
	  gimple_gen_ic_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_AVERAGE:
	  gimple_gen_average_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_IOR:
	  gimple_gen_ior_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_TIME_PROFILE:
	  {
	    /* The time profiler is not tied to a statement; instrument the
	       block right after the function entry instead.  */
	    basic_block bb =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	    gimple_stmt_iterator gsi = gsi_start_bb (bb);

	    gimple_gen_time_profiler (t, 0, gsi);
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
    }
}
221 /* Fill the working set information into the profile_info structure. */
223 void
224 get_working_sets (void)
226 unsigned ws_ix, pctinc, pct;
227 gcov_working_set_t *ws_info;
229 if (!profile_info)
230 return;
232 compute_working_sets (profile_info, gcov_working_sets);
234 if (dump_file)
236 fprintf (dump_file, "Counter working sets:\n");
237 /* Multiply the percentage by 100 to avoid float. */
238 pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
239 for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
240 ws_ix++, pct += pctinc)
242 if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
243 pct = 9990;
244 ws_info = &gcov_working_sets[ws_ix];
245 /* Print out the percentage using int arithmatic to avoid float. */
246 fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
247 "%"PRId64 "\n",
248 pct / 100, pct - (pct / 100 * 100),
249 ws_info->num_counters,
250 (int64_t)ws_info->min_counter);
255 /* Given a the desired percentage of the full profile (sum_all from the
256 summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
257 the corresponding working set information. If an exact match for
258 the percentage isn't found, the closest value is used. */
260 gcov_working_set_t *
261 find_working_set (unsigned pct_times_10)
263 unsigned i;
264 if (!profile_info)
265 return NULL;
266 gcc_assert (pct_times_10 <= 1000);
267 if (pct_times_10 >= 999)
268 return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
269 i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
270 if (!i)
271 return &gcov_working_sets[0];
272 return &gcov_working_sets[i - 1];
/* Computes hybrid profile for all matching entries in da_file.
   Reads the arc counters recorded for this function from the .gcda data
   (one counter per edge that is neither ignored nor on the spanning
   tree) and refreshes the working-set summary.  Returns the counter
   array, or NULL if no matching counts were found.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static gcov_type *
get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  num_edges++;
    }

  /* This also fills in profile_info (the summary) as a side effect.  */
  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
				lineno_checksum, &profile_info);
  if (!counts)
    return NULL;

  get_working_sets ();

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
	     profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
312 static bool
313 is_edge_inconsistent (vec<edge, va_gc> *edges)
315 edge e;
316 edge_iterator ei;
317 FOR_EACH_EDGE (e, ei, edges)
319 if (!EDGE_INFO (e)->ignore)
321 if (e->count < 0
322 && (!(e->flags & EDGE_FAKE)
323 || !block_ends_with_call_p (e->src)))
325 if (dump_file)
327 fprintf (dump_file,
328 "Edge %i->%i is inconsistent, count%"PRId64,
329 e->src->index, e->dest->index, e->count);
330 dump_bb (dump_file, e->src, 0, TDF_DETAILS);
331 dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
333 return true;
337 return false;
340 static void
341 correct_negative_edge_counts (void)
343 basic_block bb;
344 edge e;
345 edge_iterator ei;
347 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
349 FOR_EACH_EDGE (e, ei, bb->succs)
351 if (e->count < 0)
352 e->count = 0;
357 /* Check consistency.
358 Return true if inconsistency is found. */
359 static bool
360 is_inconsistent (void)
362 basic_block bb;
363 bool inconsistent = false;
364 FOR_EACH_BB_FN (bb, cfun)
366 inconsistent |= is_edge_inconsistent (bb->preds);
367 if (!dump_file && inconsistent)
368 return true;
369 inconsistent |= is_edge_inconsistent (bb->succs);
370 if (!dump_file && inconsistent)
371 return true;
372 if (bb->count < 0)
374 if (dump_file)
376 fprintf (dump_file, "BB %i count is negative "
377 "%"PRId64,
378 bb->index,
379 bb->count);
380 dump_bb (dump_file, bb, 0, TDF_DETAILS);
382 inconsistent = true;
384 if (bb->count != sum_edge_counts (bb->preds))
386 if (dump_file)
388 fprintf (dump_file, "BB %i count does not match sum of incoming edges "
389 "%"PRId64" should be %"PRId64,
390 bb->index,
391 bb->count,
392 sum_edge_counts (bb->preds));
393 dump_bb (dump_file, bb, 0, TDF_DETAILS);
395 inconsistent = true;
397 if (bb->count != sum_edge_counts (bb->succs) &&
398 ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
399 && block_ends_with_call_p (bb)))
401 if (dump_file)
403 fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
404 "%"PRId64" should be %"PRId64,
405 bb->index,
406 bb->count,
407 sum_edge_counts (bb->succs));
408 dump_bb (dump_file, bb, 0, TDF_DETAILS);
410 inconsistent = true;
412 if (!dump_file && inconsistent)
413 return true;
416 return inconsistent;
419 /* Set each basic block count to the sum of its outgoing edge counts */
420 static void
421 set_bb_counts (void)
423 basic_block bb;
424 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
426 bb->count = sum_edge_counts (bb->succs);
427 gcc_assert (bb->count >= 0);
431 /* Reads profile data and returns total number of edge counts read */
432 static int
433 read_profile_edge_counts (gcov_type *exec_counts)
435 basic_block bb;
436 int num_edges = 0;
437 int exec_counts_pos = 0;
438 /* For each edge not on the spanning tree, set its execution count from
439 the .da file. */
440 /* The first count in the .da file is the number of times that the function
441 was entered. This is the exec_count for block zero. */
443 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
445 edge e;
446 edge_iterator ei;
448 FOR_EACH_EDGE (e, ei, bb->succs)
449 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
451 num_edges++;
452 if (exec_counts)
454 e->count = exec_counts[exec_counts_pos++];
455 if (e->count > profile_info->sum_max)
457 if (flag_profile_correction)
459 static bool informed = 0;
460 if (dump_enabled_p () && !informed)
461 dump_printf_loc (MSG_NOTE, input_location,
462 "corrupted profile info: edge count"
463 " exceeds maximal count\n");
464 informed = 1;
466 else
467 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
468 bb->index, e->dest->index);
471 else
472 e->count = 0;
474 EDGE_INFO (e)->count_valid = 1;
475 BB_INFO (bb)->succ_count--;
476 BB_INFO (e->dest)->pred_count--;
477 if (dump_file)
479 fprintf (dump_file, "\nRead edge from %i to %i, count:",
480 bb->index, e->dest->index);
481 fprintf (dump_file, "%"PRId64,
482 (int64_t) e->count);
487 return num_edges;
#define OVERLAP_BASE 10000

/* Compare the static estimated profile to the actual profile, and
   return the "degree of overlap" measure between them.

   Degree of overlap is a number between 0 and OVERLAP_BASE. It is
   the sum of each basic block's minimum relative weights between
   two profiles. And overlap of OVERLAP_BASE means two profiles are
   identical.  */

static int
compute_frequency_overlap (void)
{
  gcov_type count_total = 0, freq_total = 0;
  int overlap = 0;
  basic_block bb;

  /* First pass: totals, so each block's weight can be normalized.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      count_total += bb->count;
      freq_total += bb->frequency;
    }

  /* With no counts or no frequencies there is nothing to compare.  */
  if (count_total == 0 || freq_total == 0)
    return 0;

  /* Second pass: accumulate the per-block minimum of the two relative
     weights, each scaled to OVERLAP_BASE.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
		    bb->frequency * OVERLAP_BASE / freq_total);

  return overlap;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.

   Reads the edge counters via get_exec_counts, solves the remaining
   edge/block counts by flow conservation over the spanning tree, checks
   (and optionally corrects) inconsistencies, and finally converts the
   counts into edge probabilities and block frequencies.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_branches;
  gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (!profile_info)
    return;

  if (profile_info->sum_all < profile_info->sum_max)
    {
      error ("corrupted profile info: sum_all is smaller than sum_max");
      exec_counts = NULL;
    }

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_profile_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */

  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
	{
	  struct bb_profile_info *bi = BB_INFO (bb);
	  if (! bi->count_valid)
	    {
	      /* All successor counts known: block count is their sum.  */
	      if (bi->succ_count == 0)
		{
		  edge e;
		  edge_iterator ei;
		  gcov_type total = 0;

		  FOR_EACH_EDGE (e, ei, bb->succs)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	      /* Likewise from the predecessor side.  */
	      else if (bi->pred_count == 0)
		{
		  edge e;
		  edge_iterator ei;
		  gcov_type total = 0;

		  FOR_EACH_EDGE (e, ei, bb->preds)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	    }
	  if (bi->count_valid)
	    {
	      /* Exactly one unknown successor: deduce it from the block
		 count by conservation.  */
	      if (bi->succ_count == 1)
		{
		  edge e;
		  edge_iterator ei;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
		      break;

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total;

		  gcc_assert (e);
		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->succ_count--;

		  BB_INFO (e->dest)->pred_count--;
		  changes = 1;
		}
	      /* Same, for exactly one unknown predecessor.  */
	      if (bi->pred_count == 1)
		{
		  edge e;
		  edge_iterator ei;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
		      break;

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total + e->count;

		  gcc_assert (e);
		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->pred_count--;

		  BB_INFO (e->src)->succ_count--;
		  changes = 1;
		}
	    }
	}
    }
  if (dump_file)
    {
      int overlap = compute_frequency_overlap ();
      gimple_dump_cfg (dump_file, dump_flags);
      fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
	       overlap / (OVERLAP_BASE / 100),
	       overlap % (OVERLAP_BASE / 100));
    }

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }

  /* Check for inconsistent basic block counts */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
	{
	  /* Inconsistency detected. Make it flow-consistent. */
	  static int informed = 0;
	  if (dump_enabled_p () && informed == 0)
	    {
	      informed = 1;
	      dump_printf_loc (MSG_NOTE, input_location,
			       "correcting inconsistent profile data\n");
	    }
	  correct_negative_edge_counts ();
	  /* Set bb counts to the sum of the outgoing edge counts */
	  set_bb_counts ();
	  if (dump_file)
	    fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
	  mcf_smooth_cfg ();
	}
      else
	error ("corrupted profile info: profile data is not flow-consistent");
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
	{
	  error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
		 bb->index, (int)bb->count);
	  bb->count = 0;
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Function may return twice in the case the called function is
	     setjmp or calls fork, but we can't represent this by extra
	     edge from the entry, since extra edge from the exit is
	     already present.  We get negative frequency from the entry
	     point.  */
	  if ((e->count < 0
	       && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	      || (e->count > bb->count
		  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
	    {
	      if (block_ends_with_call_p (bb))
		e->count = e->count < 0 ? 0 : bb->count;
	    }
	  if (e->count < 0 || e->count > bb->count)
	    {
	      error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
		     e->src->index, e->dest->index,
		     (int)e->count);
	      e->count = bb->count / 2;
	    }
	}
      if (bb->count)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
	  if (bb->index >= NUM_FIXED_BLOCKS
	      && block_ends_with_condjump_p (bb)
	      && EDGE_COUNT (bb->succs) >= 2)
	    {
	      int prob;
	      edge e;
	      int index;

	      /* Find the branch edge.  It is possible that we do have fake
		 edges here.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
		  break;

	      prob = e->probability;
	      index = prob * 20 / REG_BR_PROB_BASE;

	      if (index == 20)
		index = 19;
	      hist_br_prob[index]++;

	      num_branches++;
	    }
	}
      /* As a last resort, distribute the probabilities evenly.
	 Use simple heuristics that if there are normal edges,
	 give all abnormals frequency of 0, otherwise distribute the
	 frequency over abnormals (this is the case of noreturn
	 calls).  */
      else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
	{
	  int total = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
	      total ++;
	  if (total)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
		  e->probability = REG_BR_PROB_BASE / total;
		else
		  e->probability = 0;
	    }
	  else
	    {
	      total += EDGE_COUNT (bb->succs);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		e->probability = REG_BR_PROB_BASE / total;
	    }
	  if (bb->index >= NUM_FIXED_BLOCKS
	      && block_ends_with_condjump_p (bb)
	      && EDGE_COUNT (bb->succs) >= 2)
	    num_branches++;
	}
    }
  counts_to_freqs ();
  profile_status_for_fn (cfun) = PROFILE_READ;
  compute_function_frequency ();

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      if (num_branches)
	for (i = 0; i < 10; i++)
	  fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
		   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
		   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      for (i = 0; i < 20; i++)
	total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms values whose description is stored in VALUES array
   from .gcda file.
   The counters of all histograms of one type share a single counter
   section, so for each type the total size is summed first, the section
   is read as one block, and then per-histogram slices are handed out in
   VALUES order.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_value_histograms (histogram_values values, unsigned cfg_checksum,
                          unsigned lineno_checksum)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;
  struct cgraph_node *node;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  /* Sum the number of counters needed per histogram type.  */
  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
	{
	  histogram_counts[t] = NULL;
	  continue;
	}

      histogram_counts[t] =
	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
			     n_histogram_counters[t], cfg_checksum,
			     lineno_checksum, NULL);
      if (histogram_counts[t])
	any = 1;
      /* act_count[t] walks forward through the section as slices are
	 handed out below.  */
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];

      if (act_count[t])
        act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      /* Copy this histogram's slice, or zero-fill when the section was
	 missing from the profile data.  */
      for (j = 0; j < hist->n_counters; j++)
        if (aact_count)
          hist->hvalue.counters[j] = aact_count[j];
        else
          hist->hvalue.counters[j] = 0;

      /* Time profiler counter is not related to any statement,
         so that we have to read the counter and set the value to
         the corresponding call graph node.  */
      if (hist->type == HIST_TYPE_TIME_PROFILE)
        {
	  node = cgraph_node::get (hist->fun->decl);
	  node->tp_first_run = hist->hvalue.counters[0];

          if (dump_file)
            fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
        }
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    free (histogram_counts[t]);
}
940 /* When passed NULL as file_name, initialize.
941 When passed something else, output the necessary commands to change
942 line to LINE and offset to FILE_NAME. */
943 static void
944 output_location (char const *file_name, int line,
945 gcov_position_t *offset, basic_block bb)
947 static char const *prev_file_name;
948 static int prev_line;
949 bool name_differs, line_differs;
951 if (!file_name)
953 prev_file_name = NULL;
954 prev_line = -1;
955 return;
958 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
959 line_differs = prev_line != line;
961 if (name_differs || line_differs)
963 if (!*offset)
965 *offset = gcov_write_tag (GCOV_TAG_LINES);
966 gcov_write_unsigned (bb->index);
967 name_differs = line_differs=true;
970 /* If this is a new source file, then output the
971 file's name to the .bb file. */
972 if (name_differs)
974 prev_file_name = file_name;
975 gcov_write_unsigned (0);
976 gcov_write_string (prev_file_name);
978 if (line_differs)
980 gcov_write_unsigned (line);
981 prev_line = line;
986 /* Instrument and/or analyze program behavior based on program the CFG.
988 This function creates a representation of the control flow graph (of
989 the function being compiled) that is suitable for the instrumentation
990 of edges and/or converting measured edge counts to counts on the
991 complete CFG.
993 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
994 the flow graph that are needed to reconstruct the dynamic behavior of the
995 flow graph. This data is written to the gcno file for gcov.
997 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
998 information from the gcda file containing edge count information from
999 previous executions of the function being compiled. In this case, the
1000 control flow graph is annotated with actual execution counts by
1001 compute_branch_probabilities().
1003 Main entry point of this file. */
1005 void
1006 branch_prob (void)
1008 basic_block bb;
1009 unsigned i;
1010 unsigned num_edges, ignored_edges;
1011 unsigned num_instrumented;
1012 struct edge_list *el;
1013 histogram_values values = histogram_values ();
1014 unsigned cfg_checksum, lineno_checksum;
1016 total_num_times_called++;
1018 flow_call_edges_add (NULL);
1019 add_noreturn_fake_exit_edges ();
1021 /* We can't handle cyclic regions constructed using abnormal edges.
1022 To avoid these we replace every source of abnormal edge by a fake
1023 edge from entry node and every destination by fake edge to exit.
1024 This keeps graph acyclic and our calculation exact for all normal
1025 edges except for exit and entrance ones.
1027 We also add fake exit edges for each call and asm statement in the
1028 basic, since it may not return. */
1030 FOR_EACH_BB_FN (bb, cfun)
1032 int need_exit_edge = 0, need_entry_edge = 0;
1033 int have_exit_edge = 0, have_entry_edge = 0;
1034 edge e;
1035 edge_iterator ei;
1037 /* Functions returning multiple times are not handled by extra edges.
1038 Instead we simply allow negative counts on edges from exit to the
1039 block past call and corresponding probabilities. We can't go
1040 with the extra edges because that would result in flowgraph that
1041 needs to have fake edges outside the spanning tree. */
1043 FOR_EACH_EDGE (e, ei, bb->succs)
1045 gimple_stmt_iterator gsi;
1046 gimple last = NULL;
1048 /* It may happen that there are compiler generated statements
1049 without a locus at all. Go through the basic block from the
1050 last to the first statement looking for a locus. */
1051 for (gsi = gsi_last_nondebug_bb (bb);
1052 !gsi_end_p (gsi);
1053 gsi_prev_nondebug (&gsi))
1055 last = gsi_stmt (gsi);
1056 if (gimple_has_location (last))
1057 break;
1060 /* Edge with goto locus might get wrong coverage info unless
1061 it is the only edge out of BB.
1062 Don't do that when the locuses match, so
1063 if (blah) goto something;
1064 is not computed twice. */
1065 if (last
1066 && gimple_has_location (last)
1067 && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
1068 && !single_succ_p (bb)
1069 && (LOCATION_FILE (e->goto_locus)
1070 != LOCATION_FILE (gimple_location (last))
1071 || (LOCATION_LINE (e->goto_locus)
1072 != LOCATION_LINE (gimple_location (last)))))
1074 basic_block new_bb = split_edge (e);
1075 edge ne = single_succ_edge (new_bb);
1076 ne->goto_locus = e->goto_locus;
1078 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1079 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1080 need_exit_edge = 1;
1081 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1082 have_exit_edge = 1;
1084 FOR_EACH_EDGE (e, ei, bb->preds)
1086 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1087 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1088 need_entry_edge = 1;
1089 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1090 have_entry_edge = 1;
1093 if (need_exit_edge && !have_exit_edge)
1095 if (dump_file)
1096 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1097 bb->index);
1098 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1100 if (need_entry_edge && !have_entry_edge)
1102 if (dump_file)
1103 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1104 bb->index);
1105 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1106 /* Avoid bbs that have both fake entry edge and also some
1107 exit edge. One of those edges wouldn't be added to the
1108 spanning tree, but we can't instrument any of them. */
1109 if (have_exit_edge || need_exit_edge)
1111 gimple_stmt_iterator gsi;
1112 gimple first;
1114 gsi = gsi_start_nondebug_after_labels_bb (bb);
1115 gcc_checking_assert (!gsi_end_p (gsi));
1116 first = gsi_stmt (gsi);
1117 /* Don't split the bbs containing __builtin_setjmp_receiver
1118 or ABNORMAL_DISPATCHER calls. These are very
1119 special and don't expect anything to be inserted before
1120 them. */
1121 if (is_gimple_call (first)
1122 && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1123 || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1124 || (gimple_call_internal_p (first)
1125 && (gimple_call_internal_fn (first)
1126 == IFN_ABNORMAL_DISPATCHER))))
1127 continue;
1129 if (dump_file)
1130 fprintf (dump_file, "Splitting bb %i after labels\n",
1131 bb->index);
1132 split_block_after_labels (bb);
1137 el = create_edge_list ();
1138 num_edges = NUM_EDGES (el);
1139 alloc_aux_for_edges (sizeof (struct edge_profile_info));
1141 /* The basic blocks are expected to be numbered sequentially. */
1142 compact_blocks ();
1144 ignored_edges = 0;
1145 for (i = 0 ; i < num_edges ; i++)
1147 edge e = INDEX_EDGE (el, i);
1148 e->count = 0;
1150 /* Mark edges we've replaced by fake edges above as ignored. */
1151 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1152 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1153 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1155 EDGE_INFO (e)->ignore = 1;
1156 ignored_edges++;
1160 /* Create spanning tree from basic block graph, mark each edge that is
1161 on the spanning tree. We insert as many abnormal and critical edges
1162 as possible to minimize number of edge splits necessary. */
1164 find_spanning_tree (el);
1166 /* Fake edges that are not on the tree will not be instrumented, so
1167 mark them ignored. */
1168 for (num_instrumented = i = 0; i < num_edges; i++)
1170 edge e = INDEX_EDGE (el, i);
1171 struct edge_profile_info *inf = EDGE_INFO (e);
1173 if (inf->ignore || inf->on_tree)
1174 /*NOP*/;
1175 else if (e->flags & EDGE_FAKE)
1177 inf->ignore = 1;
1178 ignored_edges++;
1180 else
1181 num_instrumented++;
1184 total_num_blocks += n_basic_blocks_for_fn (cfun);
1185 if (dump_file)
1186 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1188 total_num_edges += num_edges;
1189 if (dump_file)
1190 fprintf (dump_file, "%d edges\n", num_edges);
1192 total_num_edges_ignored += ignored_edges;
1193 if (dump_file)
1194 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1196 total_num_edges_instrumented += num_instrumented;
1197 if (dump_file)
1198 fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1200 /* Compute two different checksums. Note that we want to compute
1201 the checksum in only once place, since it depends on the shape
1202 of the control flow which can change during
1203 various transformations. */
1204 cfg_checksum = coverage_compute_cfg_checksum (cfun);
1205 lineno_checksum = coverage_compute_lineno_checksum ();
1207 /* Write the data from which gcov can reconstruct the basic block
1208 graph and function line numbers (the gcno file). */
1209 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1211 gcov_position_t offset;
1213 /* Basic block flags */
1214 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1215 for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1216 gcov_write_unsigned (0);
1217 gcov_write_length (offset);
1219 /* Arcs */
1220 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1221 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1223 edge e;
1224 edge_iterator ei;
1226 offset = gcov_write_tag (GCOV_TAG_ARCS);
1227 gcov_write_unsigned (bb->index);
1229 FOR_EACH_EDGE (e, ei, bb->succs)
1231 struct edge_profile_info *i = EDGE_INFO (e);
1232 if (!i->ignore)
1234 unsigned flag_bits = 0;
1236 if (i->on_tree)
1237 flag_bits |= GCOV_ARC_ON_TREE;
1238 if (e->flags & EDGE_FAKE)
1239 flag_bits |= GCOV_ARC_FAKE;
1240 if (e->flags & EDGE_FALLTHRU)
1241 flag_bits |= GCOV_ARC_FALLTHROUGH;
1242 /* On trees we don't have fallthru flags, but we can
1243 recompute them from CFG shape. */
1244 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1245 && e->src->next_bb == e->dest)
1246 flag_bits |= GCOV_ARC_FALLTHROUGH;
1248 gcov_write_unsigned (e->dest->index);
1249 gcov_write_unsigned (flag_bits);
1253 gcov_write_length (offset);
1256 /* Line numbers. */
1257 /* Initialize the output. */
1258 output_location (NULL, 0, NULL, NULL);
1260 FOR_EACH_BB_FN (bb, cfun)
1262 gimple_stmt_iterator gsi;
1263 gcov_position_t offset = 0;
1265 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1267 expanded_location curr_location =
1268 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1269 output_location (curr_location.file, curr_location.line,
1270 &offset, bb);
1273 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1275 gimple stmt = gsi_stmt (gsi);
1276 if (gimple_has_location (stmt))
1277 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1278 &offset, bb);
1281 /* Notice GOTO expressions eliminated while constructing the CFG. */
1282 if (single_succ_p (bb)
1283 && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
1284 != UNKNOWN_LOCATION)
1286 expanded_location curr_location
1287 = expand_location (single_succ_edge (bb)->goto_locus);
1288 output_location (curr_location.file, curr_location.line,
1289 &offset, bb);
1292 if (offset)
1294 /* A file of NULL indicates the end of run. */
1295 gcov_write_unsigned (0);
1296 gcov_write_string (NULL);
1297 gcov_write_length (offset);
1302 if (flag_profile_values)
1303 gimple_find_values_to_profile (&values);
1305 if (flag_branch_probabilities)
1307 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1308 if (flag_profile_values)
1309 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1312 remove_fake_edges ();
1314 /* For each edge not on the spanning tree, add counting code. */
1315 if (profile_arc_flag
1316 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1318 unsigned n_instrumented;
1320 gimple_init_edge_profiler ();
1322 n_instrumented = instrument_edges (el);
1324 gcc_assert (n_instrumented == num_instrumented);
1326 if (flag_profile_values)
1327 instrument_values (values);
1329 /* Commit changes done by instrumentation. */
1330 gsi_commit_edge_inserts ();
1333 free_aux_for_edges ();
1335 values.release ();
1336 free_edge_list (el);
1337 coverage_end_function (lineno_checksum, cfg_checksum);
1340 /* Union find algorithm implementation for the basic blocks using
1341 aux fields. */
1343 static basic_block
1344 find_group (basic_block bb)
1346 basic_block group = bb, bb1;
1348 while ((basic_block) group->aux != group)
1349 group = (basic_block) group->aux;
1351 /* Compress path. */
1352 while ((basic_block) bb->aux != group)
1354 bb1 = (basic_block) bb->aux;
1355 bb->aux = (void *) group;
1356 bb = bb1;
1358 return group;
1361 static void
1362 union_groups (basic_block bb1, basic_block bb2)
1364 basic_block bb1g = find_group (bb1);
1365 basic_block bb2g = find_group (bb2);
1367 /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
1368 this code is unlikely going to be performance problem anyway. */
1369 gcc_assert (bb1g != bb2g);
1371 bb1g->aux = bb2g;
1374 /* This function searches all of the edges in the program flow graph, and puts
1375 as many bad edges as possible onto the spanning tree. Bad edges include
1376 abnormals edges, which can't be instrumented at the moment. Since it is
1377 possible for fake edges to form a cycle, we will have to develop some
1378 better way in the future. Also put critical edges to the tree, since they
1379 are more expensive to instrument. */
1381 static void
1382 find_spanning_tree (struct edge_list *el)
1384 int i;
1385 int num_edges = NUM_EDGES (el);
1386 basic_block bb;
1388 /* We use aux field for standard union-find algorithm. */
1389 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1390 bb->aux = bb;
1392 /* Add fake edge exit to entry we can't instrument. */
1393 union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1395 /* First add all abnormal edges to the tree unless they form a cycle. Also
1396 add all edges to the exit block to avoid inserting profiling code behind
1397 setting return value from function. */
1398 for (i = 0; i < num_edges; i++)
1400 edge e = INDEX_EDGE (el, i);
1401 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1402 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1403 && !EDGE_INFO (e)->ignore
1404 && (find_group (e->src) != find_group (e->dest)))
1406 if (dump_file)
1407 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1408 e->src->index, e->dest->index);
1409 EDGE_INFO (e)->on_tree = 1;
1410 union_groups (e->src, e->dest);
1414 /* Now insert all critical edges to the tree unless they form a cycle. */
1415 for (i = 0; i < num_edges; i++)
1417 edge e = INDEX_EDGE (el, i);
1418 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1419 && find_group (e->src) != find_group (e->dest))
1421 if (dump_file)
1422 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1423 e->src->index, e->dest->index);
1424 EDGE_INFO (e)->on_tree = 1;
1425 union_groups (e->src, e->dest);
1429 /* And now the rest. */
1430 for (i = 0; i < num_edges; i++)
1432 edge e = INDEX_EDGE (el, i);
1433 if (!EDGE_INFO (e)->ignore
1434 && find_group (e->src) != find_group (e->dest))
1436 if (dump_file)
1437 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1438 e->src->index, e->dest->index);
1439 EDGE_INFO (e)->on_tree = 1;
1440 union_groups (e->src, e->dest);
1444 clear_aux_for_blocks ();
1447 /* Perform file-level initialization for branch-prob processing. */
1449 void
1450 init_branch_prob (void)
1452 int i;
1454 total_num_blocks = 0;
1455 total_num_edges = 0;
1456 total_num_edges_ignored = 0;
1457 total_num_edges_instrumented = 0;
1458 total_num_blocks_created = 0;
1459 total_num_passes = 0;
1460 total_num_times_called = 0;
1461 total_num_branches = 0;
1462 for (i = 0; i < 20; i++)
1463 total_hist_br_prob[i] = 0;
1466 /* Performs file-level cleanup after branch-prob processing
1467 is completed. */
1469 void
1470 end_branch_prob (void)
1472 if (dump_file)
1474 fprintf (dump_file, "\n");
1475 fprintf (dump_file, "Total number of blocks: %d\n",
1476 total_num_blocks);
1477 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1478 fprintf (dump_file, "Total number of ignored edges: %d\n",
1479 total_num_edges_ignored);
1480 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1481 total_num_edges_instrumented);
1482 fprintf (dump_file, "Total number of blocks created: %d\n",
1483 total_num_blocks_created);
1484 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1485 total_num_passes);
1486 if (total_num_times_called != 0)
1487 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1488 (total_num_passes + (total_num_times_called >> 1))
1489 / total_num_times_called);
1490 fprintf (dump_file, "Total number of branches: %d\n",
1491 total_num_branches);
1492 if (total_num_branches)
1494 int i;
1496 for (i = 0; i < 10; i++)
1497 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1498 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1499 / total_num_branches, 5*i, 5*i+5);