/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   minimal spanning tree of the BB graph; only edges that are not on
   the spanning tree (plus the entry point) need instrumenting.  From
   that information all other edge counts can be deduced.  By
   construction all fake edges must be on the spanning tree.  We also
   attempt to place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
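
/* As an illustrative sketch of the count reconstruction (not taken from the
   original sources): for a diamond-shaped CFG

       ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   a spanning tree could contain ENTRY->A, A->B, A->C and D->EXIT.  Only the
   non-tree edges B->D and C->D then need counters; every tree edge follows
   from flow conservation, e.g. count(A->B) = count(B->D) and
   count(D->EXIT) = count(B->D) + count(C->D).  */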
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"
/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;
struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
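
/* The bb_info records live in each block's AUX field:
   compute_branch_probabilities allocates them with
   alloc_aux_for_blocks (sizeof (struct bb_info)) and releases them with
   free_aux_for_blocks once the edge counts have been solved.  */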
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   EL is the list of all edges in the current function; a counter is
   emitted for every edge that is neither ignored nor on the spanning
   tree.  */
static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;
        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;
        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;
        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;
        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;
          break;
        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;
          break;
        case HIST_TYPE_IOR:
          t = GCOV_COUNTER_IOR;
          break;
        default:
          gcc_unreachable ();
        }
      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_INDIR_CALL:
          (profile_hooks->gen_ic_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_AVERAGE:
          (profile_hooks->gen_average_profiler) (hist, t, 0);
          break;
        case HIST_TYPE_IOR:
          (profile_hooks->gen_ior_profiler) (hist, t, 0);
          break;
        default:
          gcc_unreachable ();
        }
    }
}
/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
static bool
is_edge_inconsistent (VEC(edge,gc) *edges)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          if (e->count < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
              if (dump_file)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count " HOST_WIDEST_INT_PRINT_DEC,
                           e->src->index, e->dest->index, e->count);
                  dump_bb (e->src, dump_file, 0);
                  dump_bb (e->dest, dump_file, 0);
                }
              return true;
            }
        }
    }
  return false;
}
static void
correct_negative_edge_counts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->count < 0)
          e->count = 0;
    }
}
/* Check consistency.
   Return true if inconsistency is found.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;

  FOR_EACH_BB (bb)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
        return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
        return true;
      if (bb->count < 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count);
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->preds))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count,
                       sum_edge_counts (bb->preds));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->succs)
          && ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count,
                       sum_edge_counts (bb->succs));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (!dump_file && inconsistent)
        return true;
    }

  return inconsistent;
}
/* Set each basic block count to the sum of its outgoing edge counts.  */
static void
set_bb_counts (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
    }
}
/* Reads profile data and returns total number of edge counts read.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                         bb->index, e->dest->index);
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  return num_edges;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (!profile_info)
    return;
  if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
    {
      error ("corrupted profile info: run_max * runs < sum_max");
      exec_counts = NULL;
    }

  if (profile_info->sum_all < profile_info->sum_max)
    {
      error ("corrupted profile info: sum_all is smaller than sum_max");
      exec_counts = NULL;
    }

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);
  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
     a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
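
  /* For example, if a block is known to execute 100 times and all but one
     of its outgoing edges have known counts summing to 70, the remaining
     edge must account for the other 30 executions; repeatedly applying
     this rule in both directions solves the whole graph.  */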
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }
  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
        {
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
          if (informed == 0)
            {
              informed = 1;
              inform (input_location, "correcting inconsistent profile data");
            }
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
          set_bb_counts ();
          if (dump_file)
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
          mcf_smooth_cfg ();
        }
      else
        error ("corrupted profile info: profile data is not flow-consistent");
    }
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             part.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
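          /* The "+ bb->count / 2" term makes the division round to the
             nearest probability instead of truncating: e.g. with
             REG_BR_PROB_BASE == 10000, an edge taken 2 times out of 3
             gets (2 * 10000 + 1) / 3 == 6667 rather than 6666.  */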
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              num_branches++;
            }
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++, num_never_executed++;
        }
    }
  profile_status = PROFILE_READ;

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms values whose description is stored in VALUES array
   from the .gcda file.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index - 1)
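/* With the remapping done below, ENTRY_BLOCK_PTR (temporarily given index 1
   for the duration of the output) is therefore written as gcov block 0, and
   EXIT_BLOCK_PTR (temporarily given index last_basic_block) as gcov block
   last_basic_block - 1.  */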
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */
  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple_stmt_iterator gsi;
          gimple last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
            {
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
              ne->goto_block = e->goto_block;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }
  total_num_blocks += n_basic_blocks;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }
  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;
  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }
  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      FOR_EACH_BB (bb)
        {
          gimple_stmt_iterator gsi;

          offset = 0;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }

          /* Notice GOTO expressions we eliminated while constructing the
             CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
            {
              location_t curr_location = single_succ_edge (bb)->goto_locus;
              /* ??? The FILE/LINE API is inconsistent for these cases.  */
              output_location (LOCATION_FILE (curr_location),
                               LOCATION_LINE (curr_location), &offset, bb);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  coverage_end_function ();
}
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Lets go w/o it,
     this code is unlikely going to be performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}
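
/* For example, after union_groups (a, b) and union_groups (b, c) the three
   blocks share a single representative, and the path-compression loop in
   find_group rewrites each visited aux pointer to refer to that
   representative directly.  */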
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }
  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
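      /* The ">> 1" term above rounds the average to the nearest integer
         instead of truncating: e.g. 10 passes over 3 calls reports
         (10 + 1) / 3 = 3, while 11 passes over 3 calls reports
         (11 + 1) / 3 = 4.  */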
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5*i, 5*i+5);
        }
    }
}
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (current_ir_type () == IR_GIMPLE);
  profile_hooks = &tree_profile_hooks;
}