/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is described in full
   in gcov-io.h.  */
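
/* Illustrative sketch (not part of GCC; all names and numbers below are
   hypothetical): once the spanning tree is chosen, counters are emitted
   only for the non-tree edges, and flow conservation at each block
   recovers every remaining edge count.  The toy CFG is a diamond
   A -> {B, C} -> D where only the edge A->C carries a counter.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long entry_count = 100;  /* Times the function (block A) was entered.  */
  long a_to_c = 30;        /* The single instrumented (non-tree) edge.  */

  /* Conservation at A: count(A->B) + count(A->C) == count(A).  */
  long a_to_b = entry_count - a_to_c;
  /* B and C each have one in-edge and one out-edge, so counts carry over.  */
  long b_to_d = a_to_b;
  long c_to_d = a_to_c;

  printf ("A->B=%ld  B->D=%ld  C->D=%ld\n", a_to_b, b_to_d, c_to_d);
  return 0;
}
#endif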
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "coretypes.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"
/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;

/* Per-basic-block profiling information kept in the bb aux field.  */
struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the function being compiled.  Only edges that
   are neither ignored nor on the spanning tree receive a counter.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;
          break;

        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;
          break;

        case HIST_TYPE_IOR:
          t = GCOV_COUNTER_IOR;
          break;

        default:
          gcc_unreachable ();
        }

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_INDIR_CALL:
          (profile_hooks->gen_ic_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_AVERAGE:
          (profile_hooks->gen_average_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_IOR:
          (profile_hooks->gen_ior_profiler) (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
}
/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
static bool
is_edge_inconsistent (VEC(edge,gc) *edges)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          if (e->count < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
              if (dump_file)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count "
                           HOST_WIDEST_INT_PRINT_DEC,
                           e->src->index, e->dest->index, e->count);
                  dump_bb (e->src, dump_file, 0);
                  dump_bb (e->dest, dump_file, 0);
                }
              return true;
            }
        }
    }
  return false;
}
/* Zero out the counts of edges that have gone negative.  */
static void
correct_negative_edge_counts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (e->count < 0)
        e->count = 0;
}
/* Check consistency.
   Return true if inconsistency is found.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;

  FOR_EACH_BB (bb)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
        return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
        return true;
      if (bb->count < 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count);
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->preds))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count,
                       sum_edge_counts (bb->preds));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       HOST_WIDEST_INT_PRINT_DEC " should be " HOST_WIDEST_INT_PRINT_DEC,
                       bb->index, bb->count,
                       sum_edge_counts (bb->succs));
              dump_bb (bb, dump_file, 0);
            }
          inconsistent = true;
        }
      if (!dump_file && inconsistent)
        return true;
    }

  return inconsistent;
}
/* Set each basic block count to the sum of its outgoing edge counts.  */
static void
set_bb_counts (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
    }
}
/* Reads profile data and returns total number of edge counts read.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            e->count = exec_counts[exec_counts_pos++];
            if (e->count > profile_info->sum_max)
              error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                     bb->index, e->dest->index);

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  return num_edges;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int num_branches;
  int hist_br_prob[20];
  gcov_type *exec_counts = get_exec_counts ();
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
    error ("corrupted profile info: run_max * runs < sum_max");

  if (profile_info->sum_all < profile_info->sum_max)
    error ("corrupted profile info: sum_all is smaller than sum_max");

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge.

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  A toy example of
     one propagation step is sketched below.  */
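
  /* Illustrative sketch (not part of GCC; the numbers are hypothetical): one
     propagation step of the solver.  When a block's count is known and all
     but one of its successor edge counts are valid, conservation of flow
     yields the remaining count directly.  */
#if 0
  {
    gcov_type bb_count = 100;        /* Known count of the block.  */
    gcov_type known_succ_sum = 70;   /* Sum of the valid successor edges.  */
    gcov_type remaining_edge_count = bb_count - known_succ_sum;  /* == 30.  */
  }
#endif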
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
        {
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
          if (informed == 0)
            {
              informed = 1;
              inform (input_location, "correcting inconsistent profile data");
            }
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
          set_bb_counts ();
          if (dump_file)
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
          mcf_smooth_cfg ();
        }
      else
        error ("corrupted profile info: profile data is not flow-consistent");
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by an extra
             edge from the entry, since an extra edge from the exit is
             already present.  We get negative frequency from the entry
             part.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE
                              + bb->count / 2) / bb->count;
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              hist_br_prob[index]++;

              num_branches++;
            }
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++;
        }
    }

  profile_status = PROFILE_READ;

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms values whose description is stored in VALUES array
   from the .gcda file.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)

/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple_stmt_iterator gsi;
          gimple last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
            {
              basic_block new_bb = split_edge (e);
              edge ne = single_succ_edge (new_bb);
              ne->goto_locus = e->goto_locus;
              ne->goto_block = e->goto_block;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));
  /* The basic blocks are expected to be numbered sequentially.  */
  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /* Already ignored or on the spanning tree.  */;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;
  /* Arcs.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      FOR_EACH_BB (bb)
        {
          gimple_stmt_iterator gsi;

          offset = 0;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }

          /* Notice GOTO expressions we eliminated while constructing the
             CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
            {
              location_t curr_location = single_succ_edge (bb)->goto_locus;
              /* ??? The FILE/LINE API is inconsistent for these cases.  */
              output_location (LOCATION_FILE (curr_location),
                               LOCATION_LINE (curr_location), &offset, bb);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  coverage_end_function ();
}
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Lets go w/o it,
     this code is unlikely going to be performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}
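
/* Illustrative sketch (not part of GCC; all names are hypothetical): the same
   union-find scheme on a plain integer array, mirroring what find_group and
   union_groups do with the bb->aux pointers -- find with path compression,
   union without a rank field.  */
#if 0
static int uf_parent[16];

static int
uf_find (int x)
{
  int root = x;

  while (uf_parent[root] != root)
    root = uf_parent[root];

  /* Compress the path so later lookups walk at most one link.  */
  while (uf_parent[x] != root)
    {
      int next = uf_parent[x];
      uf_parent[x] = root;
      x = next;
    }
  return root;
}

static void
uf_union (int a, int b)
{
  uf_parent[uf_find (a)] = uf_find (b);
}
#endif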
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Clear the aux fields used by the union-find machinery.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (current_ir_type () == IR_GIMPLE);
  profile_hooks = &tree_profile_hooks;
}
;