/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
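
/* Illustrative sketch (not part of this file): once every edge that is
   not on the spanning tree carries a measured count, flow conservation
   recovers the remaining counts by subtraction.  The helper below is a
   hypothetical stand-alone example, not a GCC API.

     static gcov_type
     deduce_remaining_count (gcov_type block_count,
                             const gcov_type *known_counts, int n_known)
     {
       gcov_type sum = 0;
       int i;

       for (i = 0; i < n_known; i++)
         sum += known_counts[i];
       return block_count - sum;
     }

   For example, a block executed 100 times whose only instrumented
   outgoing edge was taken 60 times leaves 40 executions for the
   remaining (spanning tree) edge.  */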
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "coretypes.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"
/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;
/* Additional information about edges and basic blocks, kept in their
   aux fields.  */

struct edge_info
{
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the function's flow graph; every edge that is
   neither ignored nor on the spanning tree gets an edge profiler.
   Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
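
/* Illustrative sketch (not the GCC implementation): the code that
   gen_edge_profiler emits on an instrumented edge amounts to bumping
   one slot of a per-function counter array.  The names below are made
   up for the example; the real hook inserts the equivalent increment
   as GIMPLE on the edge, splitting critical edges when necessary.

     static gcov_type example_arc_counters[16];

     static void
     example_edge_profiler (unsigned edge_index)
     {
       example_arc_counters[edge_index]++;
     }
*/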

/* Add code to measure histograms for values in list VALUES.  */

static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        case HIST_TYPE_INDIR_CALL:
          t = GCOV_COUNTER_V_INDIR;
          break;

        case HIST_TYPE_AVERAGE:
          t = GCOV_COUNTER_AVERAGE;
          break;

        case HIST_TYPE_IOR:
          t = GCOV_COUNTER_IOR;
          break;

        default:
          gcc_unreachable ();
        }

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_INDIR_CALL:
          (profile_hooks->gen_ic_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_AVERAGE:
          (profile_hooks->gen_average_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_IOR:
          (profile_hooks->gen_ior_profiler) (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
}

/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}

static bool
is_edge_inconsistent (VEC(edge,gc) *edges)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          /* A negative count on a real (non-fake) edge means the
             profile does not add up.  */
          if (e->count < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            return true;
        }
    }
  return false;
}

static void
correct_negative_edge_counts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->count < 0)
            e->count = 0;
        }
    }
}

/* Check consistency.
   Return true if inconsistency is found.  */

static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;

  FOR_EACH_BB (bb)
    {
      if (is_edge_inconsistent (bb->preds))
        inconsistent = true;
      if (is_edge_inconsistent (bb->succs))
        inconsistent = true;
      if (bb->count != sum_edge_counts (bb->preds)
          || (bb->count != sum_edge_counts (bb->succs)
              && !(find_edge (bb, EXIT_BLOCK_PTR) != NULL
                   && block_ends_with_call_p (bb))))
        inconsistent = true;
    }

  return inconsistent;
}
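
/* Illustrative sketch (assumption, not code from this file):
   sum_edge_counts, used above, is expected to simply total the counts
   on an edge vector, skipping edges marked as ignored.

     static gcov_type
     example_sum_edge_counts (VEC (edge, gc) *edges)
     {
       gcov_type sum = 0;
       edge e;
       edge_iterator ei;

       FOR_EACH_EDGE (e, ei, edges)
         if (!EDGE_INFO (e)->ignore)
           sum += e->count;
       return sum;
     }

   The consistency test above is essentially a flow-conservation check:
   a block's count must match the totals on its incoming and outgoing
   edges.  */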

/* Set each basic block count to the sum of its outgoing edge counts.  */

static void
set_bb_counts (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      bb->count = sum_edge_counts (bb->succs);
      gcc_assert (bb->count >= 0);
    }
}

/* Reads profile data and returns total number of edge counts read.  */

static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                         bb->index, e->dest->index);
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  return num_edges;
}

/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int inconsistent = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (!profile_info)
    return;
  if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
    {
      error ("corrupted profile info: run_max * runs < sum_max");
    }

  if (profile_info->sum_all < profile_info->sum_max)
    {
      error ("corrupted profile info: sum_all is smaller than sum_max");
    }

  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  num_edges = read_profile_edge_counts (exec_counts);

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
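
  /* Worked example (illustration only): for a simple if/else diamond
     A -> {B, C} -> D where only the A->B arc is instrumented, one
     backward pass suffices.  Suppose A's entry count is 10 and the
     counter for A->B read 7.  Then A->C must be 10 - 7 = 3, B and C
     inherit 7 and 3 from their single incoming edges, and D's count is
     7 + 3 = 10.  The loop below performs exactly this kind of
     propagation until no block changes.  */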

  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }

  /* Check for inconsistent basic block counts.  */
  inconsistent = is_inconsistent ();

  if (inconsistent)
    {
      if (flag_profile_correction)
        {
          /* Inconsistency detected.  Make it flow-consistent.  */
          static int informed = 0;
          if (informed == 0)
            {
              informed = 1;
              inform (input_location, "correcting inconsistent profile data");
            }
          correct_negative_edge_counts ();
          /* Set bb counts to the sum of the outgoing edge counts.  */
          set_bb_counts ();
          if (dump_file)
            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
          mcf_smooth_cfg ();
        }
      else
        error ("corrupted profile info: profile data is not flow-consistent");
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
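          /* Worked example (illustration only): REG_BR_PROB_BASE is
             10000, so an edge taken 3 times out of a block count of 4
             gets probability (3 * 10000 + 4/2) / 4 = 7500, i.e. 75%.
             The "+ bb->count / 2" term rounds to nearest instead of
             truncating.  */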
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              num_branches++;
            }
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++, num_never_executed++;
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}

/* Load the value histograms whose descriptions are stored in the VALUES
   array from the .gcda file.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}

/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index - 1)
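
/* Worked example (illustration only): with the temporary renumbering
   performed in branch_prob below, ENTRY_BLOCK_PTR->index is set to 1
   and EXIT_BLOCK_PTR->index to last_basic_block, so BB_TO_GCOV_INDEX
   maps the entry block to 0, the first real block (index 2) to 1, and
   so on; gcov therefore sees a dense, zero-based block numbering with
   the entry block first.  */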

/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */

static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple_stmt_iterator gsi;
          gimple last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              last = gsi_stmt (gsi);
              if (gimple_has_location (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last
              && gimple_has_location (last)
              && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (gimple_location (last)))))
            {
              basic_block new_bb = split_edge (e);
              single_succ_edge (new_bb)->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }
= create_edge_list ();
949 num_edges
= NUM_EDGES (el
);
950 alloc_aux_for_edges (sizeof (struct edge_info
));
952 /* The basic blocks are expected to be numbered sequentially. */
956 for (i
= 0 ; i
< num_edges
; i
++)
958 edge e
= INDEX_EDGE (el
, i
);
961 /* Mark edges we've replaced by fake edges above as ignored. */
962 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
))
963 && e
->src
!= ENTRY_BLOCK_PTR
&& e
->dest
!= EXIT_BLOCK_PTR
)
965 EDGE_INFO (e
)->ignore
= 1;

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }
+= n_basic_blocks
;
996 fprintf (dump_file
, "%d basic blocks\n", n_basic_blocks
);
998 total_num_edges
+= num_edges
;
1000 fprintf (dump_file
, "%d edges\n", num_edges
);
1002 total_num_edges_ignored
+= ignored_edges
;
1004 fprintf (dump_file
, "%d ignored edges\n", ignored_edges
);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  /* Arcs.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      FOR_EACH_BB (bb)
        {
          gimple_stmt_iterator gsi;

          offset = 0;

          if (bb == ENTRY_BLOCK_PTR->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
              output_location (curr_location.file, curr_location.line,
                               &offset, bb);
            }

          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple stmt = gsi_stmt (gsi);
              if (gimple_has_location (stmt))
                output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }

          /* Notice GOTO expressions we eliminated while constructing the
             CFG.  */
          if (single_succ_p (bb)
              && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
            {
              location_t curr_location = single_succ_edge (bb)->goto_locus;
              /* ??? The FILE/LINE API is inconsistent for these cases.  */
              output_location (LOCATION_FILE (curr_location),
                               LOCATION_LINE (curr_location), &offset, bb);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  VEC_free (histogram_value, heap, values);
  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go w/o it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}

/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Clear the aux fields used by the union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
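
/* Illustrative sketch (not part of this file): each of the three loops
   above is an instance of the same greedy step on the union-find
   structure, differing only in which edges it considers first.  In
   miniature:

     static void
     example_add_to_tree_if_acyclic (edge e)
     {
       if (find_group (e->src) != find_group (e->dest))
         {
           EDGE_INFO (e)->on_tree = 1;
           union_groups (e->src, e->dest);
         }
     }

   Running this first over abnormal/fake edges and edges into the exit
   block, then over critical edges, and finally over the remaining
   edges yields the priority order described in the comment before
   find_spanning_tree.  */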

/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}

/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (current_ir_type () == IR_GIMPLE);
  profile_hooks = &tree_profile_hooks;
}