/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   minimal spanning tree of the BB graph; only edges that are not on
   the spanning tree (plus the entry point) need instrumenting.  From
   that information all other edge counts can be deduced.  By
   construction all fake edges must be on the spanning tree.  We also
   attempt to place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
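
/* A minimal, self-contained sketch (not part of GCC) of the count-recovery
   idea described above: only non-spanning-tree edges carry counters, and the
   count of the one remaining edge of a block follows from flow conservation
   (a block's count equals the sum of its incoming and of its outgoing edge
   counts).  The toy_* names below are hypothetical illustrations, not GCC
   types; the whole block is wrapped in #if 0 so it is never compiled.  */
#if 0
#include <stdio.h>

/* Hypothetical toy edge; not GCC's edge structure.  */
struct toy_edge
{
  int src, dest;
  long count;
  int known;
};

/* If exactly one outgoing edge of a block is unknown, recover it as
   count (block) - sum of the known outgoing edge counts.  */
static void
toy_solve_block (long block_count, struct toy_edge *succs, int n_succs)
{
  long known_sum = 0;
  struct toy_edge *unknown = NULL;
  int i;

  for (i = 0; i < n_succs; i++)
    if (succs[i].known)
      known_sum += succs[i].count;
    else
      unknown = &succs[i];

  if (unknown)
    {
      unknown->count = block_count - known_sum;
      unknown->known = 1;
    }
}

int
main (void)
{
  /* Block 1 ran 10 times; the instrumented (non-tree) edge 1->2 was counted
     7 times, so the spanning-tree edge 1->3 must have run 3 times.  */
  struct toy_edge succs[2] = { { 1, 2, 7, 1 }, { 1, 3, 0, 0 } };

  toy_solve_block (10, succs, 2);
  printf ("edge 1->3 count = %ld\n", succs[1].count);
  return 0;
}
#endif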
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
54 #include "coretypes.h"
62 #include "basic-block.h"
65 #include "value-prof.h"
68 #include "tree-flow.h"
71 #include "tree-pass.h"
/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;

  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */

#define BB_INFO(b) ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   F is the first insn of the chain.
   NUM_BLOCKS is the number of basic blocks found in F.  */

instrument_edges (struct edge_list *el)
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                       e->src->index, e->dest->index,
                       EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);

  total_num_blocks_created += num_edges;
  fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
/* Add code to measure histograms for values in list VALUES.  */

instrument_values (histogram_values values)
  /* Emit code to generate the histograms before the insns.  */
  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);

        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          t = GCOV_COUNTER_V_POW2;
        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;
        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;
          t = GCOV_COUNTER_IOR;

      if (!coverage_counter_alloc (t, hist->n_counters))

        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
        case HIST_TYPE_INDIR_CALL:
          (profile_hooks->gen_ic_profiler) (hist, t, 0);
        case HIST_TYPE_AVERAGE:
          (profile_hooks->gen_average_profiler) (hist, t, 0);
          (profile_hooks->gen_ior_profiler) (hist, t, 0);
/* Computes hybrid profile for all matching entries in da_file.  */

get_exec_counts (void)
  unsigned num_edges = 0;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);
is_edge_inconsistent (VEC(edge,gc) *edges)
  FOR_EACH_EDGE (e, ei, edges)
      if (!EDGE_INFO (e)->ignore)
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
                       "Edge %i->%i is inconsistent, count" HOST_WIDEST_INT_PRINT_DEC,
                       e->src->index, e->dest->index, e->count);
              dump_bb (e->src, dump_file, 0);
              dump_bb (e->dest, dump_file, 0);
correct_negative_edge_counts (void)
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
/* Check consistency.
   Return true if inconsistency is found.  */

is_inconsistent (void)
  bool inconsistent = false;

      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)

          fprintf (dump_file, "BB %i count is negative "
                   HOST_WIDEST_INT_PRINT_DEC,
          dump_bb (bb, dump_file, 0);

      if (bb->count != sum_edge_counts (bb->preds))
          fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                   HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                   sum_edge_counts (bb->preds));
          dump_bb (bb, dump_file, 0);

      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
          fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                   HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                   sum_edge_counts (bb->succs));
          dump_bb (bb, dump_file, 0);

  if (!dump_file && inconsistent)
/* Set each basic block count to the sum of its outgoing edge counts */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
/* Reads profile data and returns total number of edge counts read */

read_profile_edge_counts (gcov_type *exec_counts)
  int exec_counts_pos = 0;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
            e->count = exec_counts[exec_counts_pos++];
            if (e->count > profile_info->sum_max)
                error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                       bb->index, e->dest->index);

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;

              fprintf (dump_file, "\nRead edge from %i to %i, count:",
                       bb->index, e->dest->index);
              fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                       (HOST_WIDEST_INT) e->count);
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

compute_branch_probabilities (void)
  int hist_br_prob[20];
  gcov_type *exec_counts = get_exec_counts ();
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
      error ("corrupted profile info: run_max * runs < sum_max");
  if (profile_info->sum_all < profile_info->sum_max)
      error ("corrupted profile info: sum_all is smaller than sum_max");

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
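
  /* Illustrative note (not from the original sources): suppose a block with a
     known count of 10 has two successor edges and the instrumented one was
     read as 7; the remaining spanning-tree edge must then be 3.  That newly
     known edge reduces the pred_count of its destination, which may let the
     next pass solve that block as well -- hence the repeated passes below.  */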
  FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
      struct bb_info *bi = BB_INFO (bb);
      if (! bi->count_valid)
          if (bi->succ_count == 0)
              FOR_EACH_EDGE (e, ei, bb->succs)
          else if (bi->pred_count == 0)
              FOR_EACH_EDGE (e, ei, bb->preds)

      if (bi->succ_count == 1)
          /* One of the counts will be invalid, but it is zero,
             so adding it in also doesn't hurt.  */
          FOR_EACH_EDGE (e, ei, bb->succs)

          /* Search for the invalid edge, and set its count.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)

          /* Calculate count for remaining edge by conservation.  */
          total = bb->count - total;

          EDGE_INFO (e)->count_valid = 1;
          BB_INFO (e->dest)->pred_count--;

      if (bi->pred_count == 1)
          /* One of the counts will be invalid, but it is zero,
             so adding it in also doesn't hurt.  */
          FOR_EACH_EDGE (e, ei, bb->preds)

          /* Search for the invalid edge, and set its count.  */
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)

          /* Calculate count for remaining edge by conservation.  */
          total = bb->count - total + e->count;

          EDGE_INFO (e)->count_valid = 1;
          BB_INFO (e->src)->succ_count--;

    dump_flow_info (dump_file, dump_flags);

  total_num_passes += passes;
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
    gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);

  /* Check for inconsistent basic block counts */
  inconsistent = is_inconsistent ();

  if (flag_profile_correction)
      /* Inconsistency detected.  Make it flow-consistent.  */
      static int informed = 0;
          inform (input_location, "correcting inconsistent profile data");
      correct_negative_edge_counts ();
      /* Set bb counts to the sum of the outgoing edge counts */
        fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
    error ("corrupted profile info: profile data is not flow-consistent");
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);

      FOR_EACH_EDGE (e, ei, bb->succs)
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
              && e->dest == EXIT_BLOCK_PTR)
             || (e->count > bb->count
                 && e->dest != EXIT_BLOCK_PTR))
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
          if (e->count < 0 || e->count > bb->count)
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
              e->count = bb->count / 2;

          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              hist_br_prob[index]++;
          /* As a last resort, distribute the probabilities evenly.
             Use the simple heuristic that if there are normal edges,
             give all abnormals frequency of 0, otherwise distribute the
             frequency over abnormals (this is the case of noreturn
             calls).  */
      else if (profile_status == PROFILE_ABSENT)
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;

              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)

  profile_status = PROFILE_READ;

    fprintf (dump_file, "%d branches\n", num_branches);
    for (i = 0; i < 10; i++)
      fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
               (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,

  total_num_branches += num_branches;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] += hist_br_prob[i];

  fputc ('\n', dump_file);
  fputc ('\n', dump_file);

  free_aux_for_blocks ();
/* Load value histograms whose description is stored in VALUES array
   from the .gcda file.  */

compute_value_histograms (histogram_values values)
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
      if (!n_histogram_counters[t])
          histogram_counts[t] = NULL;

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
      act_count[t] = histogram_counts[t];

  for (i = 0; i < VEC_length (histogram_value, values); i++)
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
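
/* Illustrative note (not in the original sources): with ENTRY_BLOCK_PTR->index
   temporarily set to 1 and EXIT_BLOCK_PTR->index set to last_basic_block (see
   branch_prob below), this macro maps the entry block to gcov index 0,
   ordinary blocks to 1, 2, ..., and the exit block to last_basic_block - 1.  */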
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */

output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

      prev_file_name = NULL;

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
          gcov_write_unsigned (line);
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
          gimple_stmt_iterator gsi;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
              ne->goto_block = e->goto_block;
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
          if (e->dest == EXIT_BLOCK_PTR)
      FOR_EACH_EDGE (e, ei, bb->preds)
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
          if (e->src == ENTRY_BLOCK_PTR)

      if (need_exit_edge && !have_exit_edge)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      if (need_entry_edge && !have_entry_edge)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));
  /* The basic blocks are expected to be numbered sequentially.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
          EDGE_INFO (e)->ignore = 1;

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
      else if (e->flags & EDGE_FAKE)

  total_num_blocks += n_basic_blocks;
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;
  if (coverage_begin_output ())
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
              struct edge_info *i = EDGE_INFO (e);
                  unsigned flag_bits = 0;

                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);

          gcov_write_length (offset);
  if (coverage_begin_output ())
      gcov_position_t offset;

      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

          gimple_stmt_iterator gsi;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),

          /* Notice GOTO expressions we eliminated while constructing the
             CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
              location_t curr_location = single_succ_edge (bb)->goto_locus;
              /* ??? The FILE/LINE API is inconsistent for these cases.  */
              output_location (LOCATION_FILE (curr_location),
                               LOCATION_LINE (curr_location), &offset, bb);

      /* A file of NULL indicates the end of run.  */
      gcov_write_unsigned (0);
      gcov_write_string (NULL);
      gcov_write_length (offset);

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  coverage_end_function ();
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

find_group (basic_block bb)
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;

union_groups (basic_block bb1, basic_block bb2)
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK, let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges on the tree, since they
   are more expensive to instrument.  */

find_spanning_tree (struct edge_list *el)
  int num_edges = NUM_EDGES (el);

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
/* Perform file-level initialization for branch-prob processing.  */

init_branch_prob (void)
  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

end_branch_prob (void)
    fprintf (dump_file, "\n");
    fprintf (dump_file, "Total number of blocks: %d\n",
    fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
    fprintf (dump_file, "Total number of ignored edges: %d\n",
             total_num_edges_ignored);
    fprintf (dump_file, "Total number of instrumented edges: %d\n",
             total_num_edges_instrumented);
    fprintf (dump_file, "Total number of blocks created: %d\n",
             total_num_blocks_created);
    fprintf (dump_file, "Total number of graph solution passes: %d\n",
    if (total_num_times_called != 0)
      fprintf (dump_file, "Average number of graph solution passes: %d\n",
               (total_num_passes + (total_num_times_called >> 1))
               / total_num_times_called);
    fprintf (dump_file, "Total number of branches: %d\n",
             total_num_branches);
    if (total_num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                   / total_num_branches, 5*i, 5*i+5);

/* Set up hooks to enable tree-based profiling.  */

tree_register_profile_hooks (void)
  gcc_assert (current_ir_type () == IR_GIMPLE);
  profile_hooks = &tree_profile_hooks;