/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
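
/* A minimal sketch (not part of GCC, hypothetical block names and counts)
   of why instrumenting only the non-tree edges plus the entry suffices:

       ENTRY -> A -> B -> EXIT     spanning tree: ENTRY-A, A-B, B-EXIT
                A -------> EXIT    non-tree edge, instrumented

   If the instrumented edge A->EXIT was taken 3 times and the function was
   entered 10 times, flow conservation at A gives
   count(A->B) = count(ENTRY->A) - count(A->EXIT) = 10 - 3 = 7, and at B
   count(B->EXIT) = 7.  The same propagation, generalized to arbitrary
   graphs, is what compute_branch_probabilities does below.  */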
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "coretypes.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"
/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;

/* File for profiling debug output.  */
static inline FILE *
profile_dump_file (void)
{
  return profile_hooks->profile_dump_file ();
}
/* Additional information about the edges we need.  */
struct edge_info
{
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the current function's flow graph; only edges
   that are neither ignored nor on the spanning tree are instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */
  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          gcc_unreachable ();
        }

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
  VEC_free (histogram_value, heap, values);
}
/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;
  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        error ("corrupted profile info: run_max * runs < sum_max");

      if (profile_info->sum_all < profile_info->sum_max)
        error ("corrupted profile info: sum_all is smaller than sum_max");
    }
  /* Attach extra info block to each bb.  */
  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                         bb->index, e->dest->index);
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);
  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
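
  /* A small worked illustration (hypothetical numbers, not part of GCC):
     suppose block B has bb->count == 100 and three incoming edges, only two
     of whose counts are known, 60 and 10.  Then B's pred_count has been
     decremented to 1, so the remaining edge must carry 100 - (60 + 10) = 30;
     its count becomes valid, the pred/succ counters of its endpoints are
     decremented in turn, and that may enable further deductions on the next
     pass.  */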
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;
                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;
                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */
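
  /* For instance (hypothetical numbers): with REG_BR_PROB_BASE == 10000,
     an edge taken 3 times out of a block executed 4 times gets probability
     (3 * 10000 + 4 / 2) / 4 == 7500, i.e. 75%; the "+ bb->count / 2" term
     below rounds to nearest instead of truncating.  */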
  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             side.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability
              = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;
              if (index == 20)
                index = 19;
              hist_br_prob[index]++;
              /* Do this for RTL only.  */
              if (!ir_type ())
                {
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
                  /* There may already be a note put there by some other
                     pass, such as the builtin_expect expander.  */
                  if (note)
                    XEXP (note, 0) = GEN_INT (prob);
                  else
                    REG_NOTES (BB_END (bb))
                      = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                           REG_NOTES (BB_END (bb)));
                }
              num_branches++;
            }
        }
      /* Otherwise try to preserve the existing REG_BR_PROB probabilities
         that tree based profile guessing put into the code.  BB can be the
         ENTRY_BLOCK, and it can have multiple (fake) successors in
         EH cases, but it still has no code; don't crash in this case.  */
      else if (profile_status == PROFILE_ABSENT
               && EDGE_COUNT (bb->succs) > 1
               && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
        {
          int prob = INTVAL (XEXP (note, 0));

          BRANCH_EDGE (bb)->probability = prob;
          FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++, num_never_executed++;
        }
    }
  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load the value histograms whose descriptions are stored in the VALUES
   array from the .gcda file.  */
static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      tree stmt = hist->hvalue.stmt;
      stmt_ann_t ann = get_stmt_ann (stmt);

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      hist->hvalue.next = ann->histograms;
      ann->histograms = hist;
      hist->hvalue.counters =
        xmalloc (sizeof (gcov_type) * hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)

/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */
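
/* Usage sketch (illustrative only; "foo.c" and 42 are hypothetical):
   output_location (NULL, 0, NULL, NULL) resets the cached file and line,
   while a later output_location ("foo.c", 42, &offset, bb) opens a
   GCOV_TAG_LINES record for BB if one is not open yet and writes the file
   name and line only when they differ from the previously emitted ones.  */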
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
static void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();
  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we add, for every source of an abnormal edge, a fake
     edge to the exit node, and for every destination of an abnormal edge,
     a fake edge from the entry node.  This keeps the graph acyclic and our
     calculation exact for all normal edges except for exit and entrance
     ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */
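
  /* Illustration (hypothetical blocks, not GCC output): if block B ends in
     a call whose abnormal edge goes to handler block H, we add the fake
     edges B -> EXIT and ENTRY -> H instead of trying to count B -> H
     itself.  Fake edges are always forced onto the spanning tree, so they
     never need a counter, and the counts on the remaining normal edges
     stay exact.  */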
  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          tree last = last_stmt (bb);

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
                if (blah) goto something;
             is not computed twice.  */
          if (e->goto_locus && !single_succ_p (bb)
#ifdef USE_MAPPED_LOCATION
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (EXPR_LOCATION (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (EXPR_LOCATION (last)))))
#else
              && (e->goto_locus->file != EXPR_LOCUS (last)->file
                  || (e->goto_locus->line
                      != EXPR_LOCUS (last)->line)))
#endif
            {
              basic_block new = split_edge (e);
              single_succ_edge (new)->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /* NOP */;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (ir_type ()
                      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              rtx insn = BB_HEAD (bb);
              int ignore_next_note = 0;

              offset = 0;

              /* We are looking for line number notes.  Search backward
                 before basic block to find correct ones.  */
              insn = prev_nonnote_insn (insn);
              if (!insn)
                insn = get_insns ();
              else
                insn = NEXT_INSN (insn);

              while (insn != BB_END (bb))
                {
                  if (NOTE_P (insn))
                    {
                      /* Must ignore the line number notes that
                         immediately follow the end of an inline function
                         to avoid counting it twice.  There is a note
                         before the call, and one after the call.  */
                      if (NOTE_LINE_NUMBER (insn)
                          == NOTE_INSN_REPEATED_LINE_NUMBER)
                        ignore_next_note = 1;
                      else if (NOTE_LINE_NUMBER (insn) <= 0)
                        /* NOP */;
                      else if (ignore_next_note)
                        ignore_next_note = 0;
                      else
                        {
                          expanded_location s;
                          NOTE_EXPANDED_LOCATION (s, insn);
                          output_location (s.file, s.line, &offset, bb);
                        }
                    }
                  insn = NEXT_INSN (insn);
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
      else
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              block_stmt_iterator bsi;

              offset = 0;

              if (bb == ENTRY_BLOCK_PTR->next_bb)
                {
                  expanded_location curr_location =
                    expand_location (DECL_SOURCE_LOCATION
                                     (current_function_decl));
                  output_location (curr_location.file, curr_location.line,
                                   &offset, bb);
                }

              for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
                {
                  tree stmt = bsi_stmt (bsi);
                  if (EXPR_HAS_LOCATION (stmt))
                    output_location (EXPR_FILENAME (stmt),
                                     EXPR_LINENO (stmt), &offset, bb);
                }

              /* Notice GOTO expressions we eliminated while constructing the
                 CFG.  */
              if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
                {
                  /* ??? source_locus type is marked deprecated in input.h.  */
                  source_locus curr_location
                    = single_succ_edge (bb)->goto_locus;
                  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
                  output_location (LOCATION_FILE (curr_location),
                                   LOCATION_LINE (curr_location),
                                   &offset, bb);
#else
                  output_location (curr_location->file, curr_location->line,
                                   &offset, bb);
#endif
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
    }
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
        bsi_commit_edge_inserts ();
      else
        {
          commit_edge_insertions_watch_calls ();
          allocate_reg_info (max_reg_num (), FALSE, FALSE);
        }
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
         insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file ())
        dump_flow_info (profile_dump_file ());
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}
/* Union find algorithm implementation for the basic blocks, using the
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without
     it; this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */
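
/* For example (hypothetical edges): given an abnormal edge A->H, a critical
   edge B->C and an ordinary edge A->B, the three passes below admit them to
   the tree in that priority order, and each time only when find_group shows
   that the endpoints are still in different components, so the "bad" edges
   end up uninstrumented and no cycle is ever created.  */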
static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}
/* Do branch profiling and static profile estimation passes.  */

static void
rest_of_handle_branch_prob (void)
{
  struct loops loops;

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob
      && profile_status == PROFILE_ABSENT)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
}
struct tree_opt_pass pass_branch_prob =
{
  rest_of_handle_branch_prob,           /* execute */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */