/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
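
/* Illustrative sketch (hypothetical, not used by this pass; the helper
   and its name are made up for illustration): with the spanning-tree
   optimization described above, only the counts of non-tree edges are
   measured at run time; every other count follows from flow
   conservation, i.e. for each block the sum of incoming edge counts
   equals the sum of outgoing edge counts.  For a diamond

	A -> B, A -> C, B -> D, C -> D

   whose spanning tree contains A->B, A->C and B->D, measuring the
   single non-tree edge C->D (plus the entry count of A) is enough;
   the guarded helper below recovers the remaining counts.  */
#if 0
static void
spanning_tree_count_example (long long count_a, long long count_c_to_d,
                             long long *count_a_to_b,
                             long long *count_a_to_c,
                             long long *count_b_to_d)
{
  /* C has a single predecessor and a single successor, so its incoming
     edge was taken exactly as often as its outgoing edge.  */
  *count_a_to_c = count_c_to_d;
  /* Conservation at A: everything entering A leaves through B or C.  */
  *count_a_to_b = count_a - *count_a_to_c;
  /* B likewise has one incoming and one outgoing edge.  */
  *count_b_to_d = *count_a_to_b;
}
#endif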

/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */

#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"

struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);

/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the current function; every edge that is
   neither ignored nor already on the spanning tree gets a counter.  */

instrument_edges (struct edge_list *el)

  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                       e->src->index, e->dest->index,
                       EDGE_CRITICAL_P (e) ? " (and split)" : "");
              gimple_gen_edge_profiler (num_instr_edges++, e);

  total_num_blocks_created += num_edges;
  fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;

/* Add code to measure histograms for values in list VALUES.  */

instrument_values (histogram_values values)

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);

        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;

          t = GCOV_COUNTER_V_POW2;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;

        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;

        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;

          t = GCOV_COUNTER_IOR;

      if (!coverage_counter_alloc (t, hist->n_counters))

        case HIST_TYPE_INTERVAL:
          gimple_gen_interval_profiler (hist, t, 0);

          gimple_gen_pow2_profiler (hist, t, 0);

        case HIST_TYPE_SINGLE_VALUE:
          gimple_gen_one_value_profiler (hist, t, 0);

        case HIST_TYPE_CONST_DELTA:
          gimple_gen_const_delta_profiler (hist, t, 0);

        case HIST_TYPE_INDIR_CALL:
          gimple_gen_ic_profiler (hist, t, 0);

        case HIST_TYPE_AVERAGE:
          gimple_gen_average_profiler (hist, t, 0);

          gimple_gen_ior_profiler (hist, t, 0);

/* Computes hybrid profile for all matching entries in da_file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)

  unsigned num_edges = 0;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
                                lineno_checksum, &profile_info);

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

is_edge_inconsistent (VEC(edge,gc) *edges)

  FOR_EACH_EDGE (e, ei, edges)
      if (!EDGE_INFO (e)->ignore)
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))

                       "Edge %i->%i is inconsistent, count" HOST_WIDEST_INT_PRINT_DEC,
                       e->src->index, e->dest->index, e->count);
              dump_bb (e->src, dump_file, 0);
              dump_bb (e->dest, dump_file, 0);

correct_negative_edge_counts (void)

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)

/* Check consistency.
   Return true if inconsistency is found.  */

is_inconsistent (void)

  bool inconsistent = false;

      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)

          fprintf (dump_file, "BB %i count is negative "
                   HOST_WIDEST_INT_PRINT_DEC,
          dump_bb (bb, dump_file, 0);

      if (bb->count != sum_edge_counts (bb->preds))
          fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                   HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                   sum_edge_counts (bb->preds));
          dump_bb (bb, dump_file, 0);

      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
          fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                   HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                   sum_edge_counts (bb->succs));
          dump_bb (bb, dump_file, 0);

      if (!dump_file && inconsistent)

/* Set each basic block count to the sum of its outgoing edge counts.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);

/* Reads profile data and returns total number of edge counts read.  */

read_profile_edge_counts (gcov_type *exec_counts)

  int exec_counts_pos = 0;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
              e->count = exec_counts[exec_counts_pos++];
              if (e->count > profile_info->sum_max)
                  if (flag_profile_correction)
                      static bool informed = 0;
                        inform (input_location,
                                "corrupted profile info: edge count exceeds maximal count");
                    error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                           bb->index, e->dest->index);

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;

              fprintf (dump_file, "\nRead edge from %i to %i, count:",
                       bb->index, e->dest->index);
              fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                       (HOST_WIDEST_INT) e->count);

/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)

  int hist_br_prob[20];
  gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
          error ("corrupted profile info: run_max * runs < sum_max");
      if (profile_info->sum_all < profile_info->sum_max)
          error ("corrupted profile info: sum_all is smaller than sum_max");

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge.

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge; that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
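
  /* For example: if a block's count is already known to be 10 and all
     but one of its outgoing edges have known counts summing to 7, the
     remaining edge must have been traversed 3 times.  Recording that
     count may in turn leave a neighbouring block with only a single
     unknown edge, which is why the solver below keeps iterating until
     no more counts change.  */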

  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
      struct bb_info *bi = BB_INFO (bb);
      if (! bi->count_valid)
          if (bi->succ_count == 0)
              FOR_EACH_EDGE (e, ei, bb->succs)
          else if (bi->pred_count == 0)
              FOR_EACH_EDGE (e, ei, bb->preds)

          if (bi->succ_count == 1)
              /* One of the counts will be invalid, but it is zero,
                 so adding it in also doesn't hurt.  */
              FOR_EACH_EDGE (e, ei, bb->succs)

              /* Search for the invalid edge, and set its count.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)

              /* Calculate count for remaining edge by conservation.  */
              total = bb->count - total;

              EDGE_INFO (e)->count_valid = 1;
              BB_INFO (e->dest)->pred_count--;

          if (bi->pred_count == 1)
              /* One of the counts will be invalid, but it is zero,
                 so adding it in also doesn't hurt.  */
              FOR_EACH_EDGE (e, ei, bb->preds)

              /* Search for the invalid edge, and set its count.  */
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)

              /* Calculate count for remaining edge by conservation.  */
              total = bb->count - total + e->count;

              EDGE_INFO (e)->count_valid = 1;
              BB_INFO (e->src)->succ_count--;

    dump_flow_info (dump_file, dump_flags);

  total_num_passes += passes;
  fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
    gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);

  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

      if (flag_profile_correction)
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
            inform (input_location, "correcting inconsistent profile data");
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
        error ("corrupted profile info: profile data is not flow-consistent");

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);

          FOR_EACH_EDGE (e, ei, bb->succs)
              /* Function may return twice in the case the called function is
                 setjmp or calls fork, but we can't represent this by extra
                 edge from the entry, since extra edge from the exit is
                 already present.  We get negative frequency from the entry
                 point.  */
                   && e->dest == EXIT_BLOCK_PTR)
                  || (e->count > bb->count
                      && e->dest != EXIT_BLOCK_PTR))
                  if (block_ends_with_call_p (bb))
                    e->count = e->count < 0 ? 0 : bb->count;

              if (e->count < 0 || e->count > bb->count)
                  error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                         e->src->index, e->dest->index,
                  e->count = bb->count / 2;

          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
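
          /* For example, assuming REG_BR_PROB_BASE is 10000: a block
             executed 3 times whose edge was taken once gets probability
             (1 * 10000 + 3/2) / 3 == 3333, i.e. the taken ratio scaled
             to REG_BR_PROB_BASE and rounded to the nearest unit.  */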

          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;
              hist_br_prob[index]++;

      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))

              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;

              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;

          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)

  profile_status = PROFILE_READ;

  fprintf (dump_file, "%d branches\n", num_branches);
      for (i = 0; i < 10; i++)
        fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,

      total_num_branches += num_branches;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);

  free_aux_for_blocks ();

/* Load value histograms whose descriptions are stored in the VALUES array.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

compute_value_histograms (histogram_values values, unsigned cfg_checksum,
                          unsigned lineno_checksum)

  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
      if (!n_histogram_counters[t])
          histogram_counts[t] = NULL;

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], cfg_checksum,
                             lineno_checksum, NULL);
      if (histogram_counts[t])
      act_count[t] = histogram_counts[t];

  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    free (histogram_counts[t]);

/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index - 1)
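/* Thus, once branch_prob below has temporarily renumbered the entry
   block to 1 and the exit block to last_basic_block, the entry block is
   written to the .gcno file as gcov index 0 and the exit block as the
   highest index.  */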

/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */

output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)

  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

      prev_file_name = NULL;

  name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);

      gcov_write_unsigned (line);

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;
  unsigned cfg_checksum, lineno_checksum;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
          gimple_stmt_iterator gsi;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_nondebug_bb (bb);
               gsi_prev_nondebug (&gsi))
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
              ne->goto_block = e->goto_block;

          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
          if (e->dest == EXIT_BLOCK_PTR)

      FOR_EACH_EDGE (e, ei, bb->preds)
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
          if (e->src == ENTRY_BLOCK_PTR)

      if (need_exit_edge && !have_exit_edge)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      if (need_entry_edge && !have_entry_edge)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */

  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
          EDGE_INFO (e)->ignore = 1;

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
      else if (e->flags & EDGE_FAKE)

  total_num_blocks += n_basic_blocks;
  fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Compute two different checksums.  Note that we want to compute
     the checksum in only one place, since it depends on the shape
     of the control flow which can change during
     various transformations.  */
  cfg_checksum = coverage_compute_cfg_checksum ();
  lineno_checksum = coverage_compute_lineno_checksum ();

  /* Write the data from which gcov can reconstruct the basic block
     graphs.  */

  /* Basic block flags.  */
  if (coverage_begin_output (lineno_checksum, cfg_checksum))
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  if (coverage_begin_output (lineno_checksum, cfg_checksum))
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
              struct edge_info *i = EDGE_INFO (e);
                  unsigned flag_bits = 0;

                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);

          gcov_write_length (offset);
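
  /* Each GCOV_TAG_ARCS record written above thus holds the source
     block's gcov index followed by a (destination index, flag bits)
     pair for every outgoing edge; gcov_write_length patches in the
     record length once all pairs have been emitted.  */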

  if (coverage_begin_output (lineno_checksum, cfg_checksum))
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

          gimple_stmt_iterator gsi;
          gcov_position_t offset = 0;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),

          /* Notice GOTO expressions eliminated while constructing the CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
              expanded_location curr_location
                = expand_location (single_succ_edge (bb)->goto_locus);
              output_location (curr_location.file, curr_location.line,

              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    gimple_find_values_to_profile (&values);

  if (flag_branch_probabilities)
      compute_branch_probabilities (cfg_checksum, lineno_checksum);
      if (flag_profile_values)
        compute_value_histograms (values, cfg_checksum, lineno_checksum);

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
      unsigned n_instrumented;

      gimple_init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  coverage_end_function (lineno_checksum, cfg_checksum);

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

find_group (basic_block bb)

  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;

union_groups (basic_block bb1, basic_block bb2)

  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK, let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

find_spanning_tree (struct edge_list *el)

  int num_edges = NUM_EDGES (el);

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)

/* Perform file-level initialization for branch-prob processing.  */

init_branch_prob (void)

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

end_branch_prob (void)

      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      if (total_num_branches)
          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);