/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal span tree, only edges that are not on the span tree
   (plus the entry point) need instrumenting.  From that information
   all other edge counts can be deduced.  By construction all fake
   edges must be on the spanning tree.  We also attempt to place
   EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
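
/* Illustrative example (not taken from any particular test case): for a
   simple if/else diamond -- ENTRY -> A, A -> B, A -> C, B -> D, C -> D,
   D -> EXIT -- the closed graph has six blocks and six real edges.  After
   the exit and entry blocks are pre-joined (the fake connection that can
   never be instrumented), a spanning tree over the remaining edges leaves
   only two edges off the tree, for instance the two arms A -> B and
   A -> C.  Only those two need counters; every other edge count follows
   from flow conservation at the blocks.  */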
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
#include "tree-flow.h"
#include "timevar.h"
#include "cfgloop.h"
#include "tree-pass.h"

#include "profile.h"
struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
/* Add edge instrumentation code to the entire function.

   EL is the edge list of the function's control flow graph; each edge
   that is neither ignored nor on the spanning tree gets a counter.
   Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              gimple_gen_edge_profiler (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;
          break;

        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;
          break;

        case HIST_TYPE_IOR:
          t = GCOV_COUNTER_IOR;
          break;

        default:
          gcc_unreachable ();
        }
      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          gimple_gen_interval_profiler (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          gimple_gen_pow2_profiler (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          gimple_gen_one_value_profiler (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          gimple_gen_const_delta_profiler (hist, t, 0);
          break;

        case HIST_TYPE_INDIR_CALL:
          gimple_gen_ic_profiler (hist, t, 0);
          break;

        case HIST_TYPE_AVERAGE:
          gimple_gen_average_profiler (hist, t, 0);
          break;

        case HIST_TYPE_IOR:
          gimple_gen_ior_profiler (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
}
/* Computes hybrid profile for all matching entries in da_file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static gcov_type *
get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
                                lineno_checksum, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
static bool
is_edge_inconsistent (VEC(edge,gc) *edges)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          if (e->count < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
              if (dump_file)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count " HOST_WIDEST_INT_PRINT_DEC,
                           e->src->index, e->dest->index, e->count);
                  dump_bb (e->src, dump_file, 0);
                  dump_bb (e->dest, dump_file, 0);
                }
              return true;
            }
        }
    }
  return false;
}
static void
correct_negative_edge_counts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->count < 0)
            e->count = 0;
        }
    }
}
/* Check consistency.
   Return true if inconsistency is found.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;
  FOR_EACH_BB (bb)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
        return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
        return true;
      if (bb->count < 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       HOST_WIDEST_INT_PRINT_DEC,
                       bb->index,
                       bb->count);
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->preds))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index,
                       bb->count,
                       sum_edge_counts (bb->preds));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index,
                       bb->count,
                       sum_edge_counts (bb->succs));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (!dump_file && inconsistent)
        return true;
    }

  return inconsistent;
}
/* Set each basic block count to the sum of its outgoing edge counts.  */
static void
set_bb_counts (void)
{
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
    }
}
/* Reads profile data and returns total number of edge counts read.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    if (flag_profile_correction)
                      {
                        static bool informed = 0;
                        if (!informed)
                          inform (input_location,
                                  "corrupted profile info: edge count exceeds maximal count");
                        informed = 1;
                      }
                    else
                      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                             bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  return num_edges;
}
#define OVERLAP_BASE 10000

/* Compare the static estimated profile to the actual profile, and
   return the "degree of overlap" measure between them.

   Degree of overlap is a number between 0 and OVERLAP_BASE.  It is
   the sum of each basic block's minimum relative weights between
   two profiles.  An overlap of OVERLAP_BASE means the two profiles are
   identical.  */

static int
compute_frequency_overlap (void)
{
  gcov_type count_total = 0, freq_total = 0;
  int overlap = 0;
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      count_total += bb->count;
      freq_total += bb->frequency;
    }

  if (count_total == 0 || freq_total == 0)
    return 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
                    bb->frequency * OVERLAP_BASE / freq_total);

  return overlap;
}
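
/* Worked example with made-up numbers: two blocks with measured counts
   30 and 70 and static frequencies 50 and 50 have per-block relative
   weights of 3000/7000 versus 5000/5000 once scaled by OVERLAP_BASE, so
   the overlap is MIN (3000, 5000) + MIN (7000, 5000) == 8000, i.e. the
   two profiles agree to 80%.  */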
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_branches;
  gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (!profile_info)
    return;
  if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
    {
      error ("corrupted profile info: run_max * runs < sum_max");
      exec_counts = NULL;
    }

  if (profile_info->sum_all < profile_info->sum_max)
    {
      error ("corrupted profile info: sum_all is smaller than sum_max");
      exec_counts = NULL;
    }

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
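
  /* For instance (illustrative numbers only): if a block is known to have
     executed 100 times and all but one of its outgoing edges sum to 60,
     the remaining edge must account for the other 40 by conservation of
     flow.  Repeating such deductions over the whole graph fills in every
     missing block and edge count.  */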
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    {
      int overlap = compute_frequency_overlap ();
      dump_flow_info (dump_file, dump_flags);
      fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
               overlap / (OVERLAP_BASE / 100),
               overlap % (OVERLAP_BASE / 100));
    }

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }
  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
        {
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
          if (informed == 0)
            {
              informed = 1;
              inform (input_location, "correcting inconsistent profile data");
            }
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
          set_bb_counts ();
          if (dump_file)
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
          mcf_smooth_cfg ();
        }
      else
        error ("corrupted profile info: profile data is not flow-consistent");
    }
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_branches = 0;
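
  /* Example of the scaling applied below, assuming REG_BR_PROB_BASE is
     10000 (its usual value): an edge taken 300 times out of a block count
     of 400 gets probability (300 * 10000 + 200) / 400 == 7500, i.e. 75%
     expressed in units of the probability base, rounded to nearest.  */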
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int)e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              num_branches++;
            }
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total ++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++;
        }
    }
  counts_to_freqs ();
  profile_status = PROFILE_READ;
  compute_function_frequency ();

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load the value histograms described in the VALUES array from the
   .gcda file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_value_histograms (histogram_values values, unsigned cfg_checksum,
                          unsigned lineno_checksum)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], cfg_checksum,
                             lineno_checksum, NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    free (histogram_counts[t]);
}
/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
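/* For example, after branch_prob temporarily renumbers the entry block to
   index 1 and the exit block to last_basic_block, the entry block is
   emitted as gcov index 0 and the exit block as last_basic_block - 1.  */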
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;
  unsigned cfg_checksum, lineno_checksum;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */
  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple_stmt_iterator gsi;
          gimple last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_nondebug_bb (bb);
               !gsi_end_p (gsi);
               gsi_prev_nondebug (&gsi))
            {
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
            {
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
              ne->goto_block = e->goto_block;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
          /* Avoid bbs that have both fake entry edge and also some
             exit edge.  One of those edges wouldn't be added to the
             spanning tree, but we can't instrument any of them.  */
          if (have_exit_edge || need_exit_edge)
            {
              gimple_stmt_iterator gsi;
              gimple first;
              tree fndecl;

              gsi = gsi_after_labels (bb);
              gcc_checking_assert (!gsi_end_p (gsi));
              first = gsi_stmt (gsi);
              if (is_gimple_debug (first))
                {
                  gsi_next_nondebug (&gsi);
                  gcc_checking_assert (!gsi_end_p (gsi));
                  first = gsi_stmt (gsi);
                }
              /* Don't split the bbs containing __builtin_setjmp_receiver
                 or __builtin_setjmp_dispatcher calls.  These are very
                 special and don't expect anything to be inserted before
                 them.  */
              if (!is_gimple_call (first)
                  || (fndecl = gimple_call_fndecl (first)) == NULL
                  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL
                  || (DECL_FUNCTION_CODE (fndecl) != BUILT_IN_SETJMP_RECEIVER
                      && (DECL_FUNCTION_CODE (fndecl)
                          != BUILT_IN_SETJMP_DISPATCHER)))
                {
                  if (dump_file)
                    fprintf (dump_file, "Splitting bb %i after labels\n",
                             bb->index);
                  split_block_after_labels (bb);
                }
            }
        }
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }
  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }
  total_num_blocks += n_basic_blocks;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
  /* Compute two different checksums.  Note that we want to compute
     the checksum in only one place, since it depends on the shape
     of the control flow which can change during
     various transformations.  */
  cfg_checksum = coverage_compute_cfg_checksum ();
  lineno_checksum = coverage_compute_lineno_checksum ();
  /* Write the data from which gcov can reconstruct the basic block
     graph and function line numbers.  */

  if (coverage_begin_function (lineno_checksum, cfg_checksum))
    {
      gcov_position_t offset;

      /* Basic block flags.  */
      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);

      /* Keep all basic block indexes nonnegative in the gcov output.
         Index 0 is used for entry block, last index is for exit
         block.  */
      ENTRY_BLOCK_PTR->index = 1;
      EXIT_BLOCK_PTR->index = last_basic_block;

      /* Arcs.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }

      ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
      EXIT_BLOCK_PTR->index = EXIT_BLOCK;

      /* Line numbers.  */
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);
      FOR_EACH_BB (bb)
        {
          gimple_stmt_iterator gsi;
          gcov_position_t offset = 0;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }

          /* Notice GOTO expressions eliminated while constructing the CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
            {
              expanded_location curr_location
                = expand_location (single_succ_edge (bb)->goto_locus);
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    gimple_find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities (cfg_checksum, lineno_checksum);
      if (flag_profile_values)
        compute_value_histograms (values, cfg_checksum, lineno_checksum);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      gimple_init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  coverage_end_function (lineno_checksum, cfg_checksum);
}
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Lets go w/o it,
     this code is unlikely going to be performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}
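
/* The two routines above implement the classic union-find structure with
   path compression (find_group rewrites every aux pointer on the walked
   chain to point directly at the group representative), but without union
   by rank, which keeps the code small at a modest theoretical cost.  */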
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}