/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */

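/* Illustrative sketch (not part of the original sources): how the counters
   on non-tree edges determine every other count.  For a hypothetical
   diamond CFG

       ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   one possible spanning tree is {ENTRY->A, A->B, C->D, D->EXIT} plus the
   fake EXIT->ENTRY edge, leaving only A->C and B->D to be instrumented.
   If the counters read back A->C = 30 and B->D = 70, flow conservation
   gives

       count(A->B) = count(B) = count(B->D)        = 70
       count(C->D) = count(C) = count(A->C)        = 30
       count(ENTRY->A) = count(A->B) + count(A->C) = 100
       count(D->EXIT)  = count(B->D) + count(C->D) = 100

   which is exactly what compute_branch_probabilities recovers below.  */
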
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "value-prof.h"
#include "tree-flow.h"
#include "tree-pass.h"

/* Hooks for profiling.  */
static struct profile_hooks *profile_hooks;

/* File for profiling debug output.  */
static inline FILE *
profile_dump_file (void) {
  return profile_hooks->profile_dump_file ();
}

/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

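/* Usage sketch (illustrative, not taken verbatim from this file): the aux
   pointers these macros cast are only meaningful between the matching
   allocation and release calls, e.g.

     alloc_aux_for_edges (sizeof (struct edge_info));
     ...
     if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
       num_instrumented++;
     ...
     free_aux_for_edges ();

   as branch_prob and instrument_edges do below.  */
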
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);

/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the current function; every edge that is
   neither ignored nor on the spanning tree receives a counter.
   Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}

/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          gcc_unreachable ();
        }

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
  VEC_free (histogram_value, heap, values);
}

/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}

/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        error ("corrupted profile info: run_max * runs < sum_max");

      if (profile_info->sum_all < profile_info->sum_max)
        error ("corrupted profile info: sum_all is smaller than sum_max");
    }

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                         bb->index, e->dest->index);
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);
  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge; that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
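  /* Worked example (illustrative numbers, not from the original comment):
     suppose BB already has bb->count = 100, two successor edges, and exactly
     one of them still unknown while the known one has count 30.  Once
     succ_count drops to 1, the loop below computes

       missing = bb->count - sum (known successor counts) = 100 - 30 = 70,

     marks that edge's count valid, and decrements the pred_count of its
     destination, which may let the next pass solve that block in turn.  */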
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE
                              + bb->count / 2) / bb->count;
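          /* Worked example (illustrative numbers): with REG_BR_PROB_BASE
             equal to 10000, an edge taken 30 times out of a block executed
             100 times gets

               (30 * 10000 + 100 / 2) / 100 = 3000,

             i.e. a 30% probability; the "+ bb->count / 2" term rounds to
             the nearest value instead of truncating.  */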
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              /* Do this for RTL only.  */
              if (!ir_type ())
                {
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
                  /* There may be already note put by some other pass, such
                     as builtin_expect expander.  */
                  if (note)
                    XEXP (note, 0) = GEN_INT (prob);
                  else
                    REG_NOTES (BB_END (bb))
                      = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                           REG_NOTES (BB_END (bb)));
                }
              num_branches++;
            }
        }
      /* Otherwise try to preserve the existing REG_BR_PROB probabilities
         that tree-based profile guessing put into the code.  BB can be the
         ENTRY_BLOCK, and it can have multiple (fake) successors in
         EH cases, but it still has no code; don't crash in this case.  */
      else if (profile_status == PROFILE_ABSENT
               && !ir_type ()
               && EDGE_COUNT (bb->succs) > 1
               && BB_END (bb)
               && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
        {
          int prob = INTVAL (XEXP (note, 0));

          BRANCH_EDGE (bb)->probability = prob;
          FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
        }
      /* As a last resort, distribute the probabilities evenly.
         Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++, num_never_executed++;
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}

/* Load the value histograms described in the VALUES array from the
   .gcda file.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      tree stmt = hist->hvalue.stmt;
      stmt_ann_t ann = get_stmt_ann (stmt);

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      hist->hvalue.next = ann->histograms;
      ann->histograms = hist;
      hist->hvalue.counters =
        xmalloc (sizeof (gcov_type) * hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}

#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)

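/* Example of the mapping (illustrative): branch_prob below temporarily sets
   ENTRY_BLOCK_PTR->index to -1 and EXIT_BLOCK_PTR->index to last_basic_block,
   so the entry block is written as gcov index 0, ordinary blocks 0, 1, 2, ...
   become 1, 2, 3, ..., and the exit block gets the highest index.  */
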
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   line to LINE and offset to FILE_NAME.  */

static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          block_stmt_iterator bsi;
          tree last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
            {
              last = bsi_stmt (bsi);
              if (EXPR_LOCUS (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last && EXPR_LOCUS (last)
              && e->goto_locus
              && !single_succ_p (bb)
#ifdef USE_MAPPED_LOCATION
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (EXPR_LOCATION (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (EXPR_LOCATION (last)))))
#else
              && (e->goto_locus->file != EXPR_LOCUS (last)->file
                  || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
#endif
            {
              basic_block new = split_edge (e);
              single_succ_edge (new)->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /* Nothing to do: either already ignored or deducible from the
           spanning tree.  */;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
  /* Arcs.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (ir_type ()
                      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }
  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              rtx insn = BB_HEAD (bb);
              int ignore_next_note = 0;

              offset = 0;

              /* We are looking for line number notes.  Search backward
                 before basic block to find correct ones.  */
              insn = prev_nonnote_insn (insn);
              if (!insn)
                insn = get_insns ();
              else
                insn = NEXT_INSN (insn);

              while (insn != BB_END (bb))
                {
                  if (NOTE_P (insn))
                    {
                      /* Must ignore the line number notes that
                         immediately follow the end of an inline function
                         to avoid counting it twice.  There is a note
                         before the call, and one after the call.  */
                      if (NOTE_LINE_NUMBER (insn)
                          == NOTE_INSN_REPEATED_LINE_NUMBER)
                        ignore_next_note = 1;
                      else if (NOTE_LINE_NUMBER (insn) <= 0)
                        /* No source line attached; skip.  */;
                      else if (ignore_next_note)
                        ignore_next_note = 0;
                      else
                        {
                          expanded_location s;
                          NOTE_EXPANDED_LOCATION (s, insn);
                          output_location (s.file, s.line, &offset, bb);
                        }
                    }
                  insn = NEXT_INSN (insn);
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
      else
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              block_stmt_iterator bsi;

              offset = 0;

              if (bb == ENTRY_BLOCK_PTR->next_bb)
                {
                  expanded_location curr_location =
                    expand_location (DECL_SOURCE_LOCATION
                                     (current_function_decl));
                  output_location (curr_location.file, curr_location.line,
                                   &offset, bb);
                }

              for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
                {
                  tree stmt = bsi_stmt (bsi);
                  if (EXPR_HAS_LOCATION (stmt))
                    output_location (EXPR_FILENAME (stmt), EXPR_LINENO (stmt),
                                     &offset, bb);
                }

              /* Notice GOTO expressions we eliminated while constructing the
                 CFG.  */
              if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
                {
                  /* ??? source_locus type is marked deprecated in input.h.  */
                  source_locus curr_location = single_succ_edge (bb)->goto_locus;
                  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
                  output_location (LOCATION_FILE (curr_location),
                                   LOCATION_LINE (curr_location),
                                   &offset, bb);
#else
                  output_location (curr_location->file, curr_location->line,
                                   &offset, bb);
#endif
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
    }
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
        bsi_commit_edge_inserts ();
      else
        {
          commit_edge_insertions_watch_calls ();
          allocate_reg_info (max_reg_num (), FALSE, FALSE);
        }
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
         insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file ())
        dump_flow_info (profile_dump_file ());
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}

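/* Usage sketch (illustrative, not from the original code): after

     union_groups (e->src, e->dest);

   both endpoints share one representative, so a later
   find_group (e->src) == find_group (e->dest) test tells the spanning-tree
   construction below that adding another edge between those two groups
   would close a cycle.  */
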
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges to the tree, since they
   are more expensive to instrument.  */

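/* Illustrative walk-through (hypothetical CFG, not from the sources): for a
   simple loop  ENTRY -> A -> B -> A  with  B -> EXIT,  the first pass below
   claims the EXIT-bound edge B->EXIT, the second pass claims the critical
   back edge B->A (it does not yet close a cycle in the tree), and the final
   pass adds what it can of the rest.  Whatever ends up off the tree, here
   ENTRY->A and A->B, is what instrument_edges later gives a counter.  */
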
static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Clear the aux fields used by the union-find algorithm above.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}

/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5*i, 5*i+5);
        }
    }
}

/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}

/* Do branch profiling and static profile estimation passes.  */
static void
rest_of_handle_branch_prob (void)
{
  struct loops loops;

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob
      && profile_status == PROFILE_ABSENT)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
}

struct tree_opt_pass pass_branch_prob =
{
  "bp",                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_branch_prob,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};