/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block, and we ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal span tree; only edges that are not on the span tree
   (plus the entry point) need instrumenting.  From that information
   all other edge counts can be deduced.  By construction all fake
   edges must be on the spanning tree.  We also attempt to place
   EDGE_CRITICAL edges on the spanning tree.

   The auxiliary file generated is <dumpbase>.bbg.  The format is
   described in full in gcov-io.h.  */
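/* An illustrative sketch (not part of the original sources): suppose
   block b0 branches to b1 and also directly to b2, b1 falls through to
   b2, and the closed graph is ENTRY -> b0, b0 -> b1, b0 -> b2,
   b1 -> b2, b2 -> EXIT.  A spanning tree of ENTRY -> b0, b0 -> b1,
   b1 -> b2 and b2 -> EXIT leaves only b0 -> b2 to instrument; that one
   counter, together with the function entry count recorded in the .da
   file, determines every other edge count by flow conservation (the
   counts into a block equal the counts out of it).  */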
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "value-prof.h"
/* Additional information about the edges we need.  */
struct edge_info
{
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info
{
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};
#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;
/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static rtx gen_edge_profiler (int);
static rtx gen_interval_profiler (struct histogram_value *, unsigned,
                                  unsigned);
static rtx gen_pow2_profiler (struct histogram_value *, unsigned, unsigned);
static rtx gen_one_value_profiler (struct histogram_value *, unsigned,
                                   unsigned);
static rtx gen_const_delta_profiler (struct histogram_value *, unsigned,
                                     unsigned);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (unsigned, struct histogram_value *);
static void compute_branch_probabilities (void);
static void compute_value_histograms (unsigned, struct histogram_value *);
static gcov_type *get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain, for the
   edges in EL that are neither ignored nor on the spanning tree.
   Returns the number of edges instrumented.  */
static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  remove_fake_edges ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              rtx edge_profile;

              if (e->flags & EDGE_ABNORMAL)
                abort ();
              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              edge_profile = gen_edge_profiler (num_instr_edges++);
              insert_insn_on_edge (edge_profile, e);
              rebuild_jump_labels (e->insns);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for the list VALUES of length N_VALUES.  */
static void
instrument_values (unsigned n_values, struct histogram_value *values)
{
  rtx sequence;
  unsigned i, t;
  edge e;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < n_values; i++)
    {
      e = split_block (BLOCK_FOR_INSN (values[i].insn),
                       PREV_INSN (values[i].insn));
      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          abort ();
        }

      if (!coverage_counter_alloc (t, values[i].n_counters))
        continue;

      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          sequence = gen_interval_profiler (values + i, t, 0);
          break;

        case HIST_TYPE_POW2:
          sequence = gen_pow2_profiler (values + i, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          sequence = gen_one_value_profiler (values + i, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          sequence = gen_const_delta_profiler (values + i, t, 0);
          break;

        default:
          abort ();
        }

      safe_insert_insn_on_edge (sequence, e);
    }
}
/* Compute the hybrid profile for all matching entries in the da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (rtl_dump_file && profile_info)
    fprintf (rtl_dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        {
          error ("corrupted profile info: run_max * runs < sum_max");
          exec_counts = NULL;
        }

      if (profile_info->sum_all < profile_info->sum_max)
        {
          error ("corrupted profile info: sum_all is smaller than sum_max");
          exec_counts = NULL;
        }
    }
  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      for (e = bb->pred; e; e = e->pred_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                           bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (rtl_dump_file)
              {
                fprintf (rtl_dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "\n%d edge counts read\n", num_edges);
  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge; that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
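  /* Worked illustration (not part of the original sources): suppose block
     B has a known count of 100 and incoming edges E1 and E2, with E1
     instrumented at count 60.  The second rule above immediately assigns
     E2 the count 100 - 60 = 40; once E2 is the last unknown successor of
     its source block, the first rule fires there in turn, and the counts
     ripple outward until the whole graph is solved.  */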
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  if (! e)
                    abort ();

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  if (! e)
                    abort ();

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  total_num_passes += passes;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Graph solving took %d passes.\n\n", passes);
  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
        abort ();
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int) bb->count);
          bb->count = 0;
        }
      for (e = bb->succ; e; e = e->succ_next)
        {
          /* The function may return twice in case the called function is
             setjmp or calls fork, but we can't represent this by an extra
             edge from the entry, since an extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              rtx insn = bb->end;

              while (GET_CODE (insn) != CALL_INSN
                     && insn != bb->head
                     && keep_with_call_p (insn))
                insn = PREV_INSN (insn);
              if (GET_CODE (insn) == CALL_INSN)
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int) e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
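          /* The expression below converts an execution count into a
             fixed-point probability scaled by REG_BR_PROB_BASE; the
             added bb->count / 2 makes the division round to nearest
             instead of truncating.  */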
          for (e = bb->succ; e; e = e->succ_next)
            e->probability = (e->count * REG_BR_PROB_BASE
                              + bb->count / 2) / bb->count;
          if (bb->index >= 0
              && any_condjump_p (bb->end)
              && bb->succ->succ_next)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
                   e = e->succ_next)
                continue; /* Loop body has been intentionally left blank.  */

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              note = find_reg_note (bb->end, REG_BR_PROB, 0);
              /* There may already be a note put there by some other pass,
                 such as the builtin_expect expander.  */
              if (note)
                XEXP (note, 0) = GEN_INT (prob);
              else
                REG_NOTES (bb->end)
                  = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                       REG_NOTES (bb->end));
              num_branches++;
            }
        }
      /* Otherwise distribute the probabilities evenly so we get a sane
         sum.  Use the simple heuristic that if there are normal edges,
         give all abnormals a frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else
        {
          int total = 0;

          for (e = bb->succ; e; e = e->succ_next)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              for (e = bb->succ; e; e = e->succ_next)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
            }
          else
            {
              for (e = bb->succ; e; e = e->succ_next)
                total++;
              for (e = bb->succ; e; e = e->succ_next)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && any_condjump_p (bb->end)
              && bb->succ->succ_next)
            num_branches++, num_never_executed++;
        }
    }

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "%d branches\n", num_branches);
      fprintf (rtl_dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', rtl_dump_file);
      fputc ('\n', rtl_dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms from the .da file for the N_VALUES values whose
   descriptions are stored in the VALUES array.  */
static void
compute_value_histograms (unsigned n_values, struct histogram_value *values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < n_values; i++)
    n_histogram_counters[(int) (values[i].type)] += values[i].n_counters;

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < n_values; i++)
    {
      rtx hist_list = NULL_RTX;
      t = (int) (values[i].type);

      aact_count = act_count[t];
      act_count[t] += values[i].n_counters;
      for (j = values[i].n_counters; j > 0; j--)
        hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]), hist_list);
      hist_list = alloc_EXPR_LIST (0, copy_rtx (values[i].value), hist_list);
      hist_list = alloc_EXPR_LIST (0, GEN_INT (values[i].type), hist_list);
      REG_NOTES (values[i].insn) =
        alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
                         REG_NOTES (values[i].insn));
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  unsigned n_values = 0;
  struct histogram_value *values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of an abnormal edge by a fake
     edge from the entry node, and every destination by a fake edge to the
     exit.  This keeps the graph acyclic and our calculation exact for all
     normal edges except the exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since they may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      for (e = bb->succ; e; e = e->succ_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      for (e = bb->pred; e; e = e->pred_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d ignored edges\n", ignored_edges);
  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for the entry block, the last index is for the
     exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
#define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          for (e = bb->succ; e; e = e->succ_next)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }
  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      char const *prev_file_name = NULL;
      gcov_position_t offset;

      FOR_EACH_BB (bb)
        {
          rtx insn = bb->head;
          int ignore_next_note = 0;

          offset = 0;

          /* We are looking for line number notes.  Search backward
             before basic block to find correct ones.  */
          insn = prev_nonnote_insn (insn);
          if (!insn)
            insn = get_insns ();
          else
            insn = NEXT_INSN (insn);

          while (insn != bb->end)
            {
              if (GET_CODE (insn) == NOTE)
                {
                  /* Must ignore the line number notes that
                     immediately follow the end of an inline function
                     to avoid counting it twice.  There is a note
                     before the call, and one after the call.  */
                  if (NOTE_LINE_NUMBER (insn)
                      == NOTE_INSN_REPEATED_LINE_NUMBER)
                    ignore_next_note = 1;
                  else if (NOTE_LINE_NUMBER (insn) <= 0)
                    /*NOP*/;
                  else if (ignore_next_note)
                    ignore_next_note = 0;
                  else
                    {
                      if (!offset)
                        {
                          offset = gcov_write_tag (GCOV_TAG_LINES);
                          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
                        }

                      /* If this is a new source file, then output the
                         file's name to the .bb file.  */
                      if (!prev_file_name
                          || strcmp (NOTE_SOURCE_FILE (insn),
                                     prev_file_name))
                        {
                          prev_file_name = NOTE_SOURCE_FILE (insn);
                          gcov_write_unsigned (0);
                          gcov_write_string (prev_file_name);
                        }
                      gcov_write_unsigned (NOTE_LINE_NUMBER (insn));
                    }
                }
              insn = NEXT_INSN (insn);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    {
      life_analysis (get_insns (), NULL, PROP_DEATH_NOTES);
      find_values_to_profile (&n_values, &values);
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (n_values, values);
    }

  /* For each edge not on the spanning tree, add counting code as rtl.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented = instrument_edges (el);

      if (n_instrumented != num_instrumented)
        abort ();

      if (flag_profile_values)
        instrument_values (n_values, values);

      /* Commit changes done by instrumentation.  */
      commit_edge_insertions_watch_calls ();
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  if (flag_profile_values)
    count_or_remove_death_notes (NULL, 1);
  remove_fake_edges ();
  free_aux_for_edges ();
  /* Re-merge split basic blocks and the mess introduced by
     insert_insn_on_edge.  */
  cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  free_edge_list (el);
}
/* Union find algorithm implementation for the basic blocks, using the
   aux field for storing the parent.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}
static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  if (bb1g == bb2g)
    abort ();

  bb1g->aux = bb2g;
}
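/* Taken together, find_group and union_groups implement the classic
   disjoint-set (union-find) structure with path compression but without
   union by rank.  find_spanning_tree below accepts an edge only when its
   endpoints are still in different groups, i.e. when adding the edge to
   the tree cannot close a cycle.  */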
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges on the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add the fake edge from exit to entry, which we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     the setting of the return value from the function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\n");
      fprintf (rtl_dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (rtl_dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (rtl_dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (rtl_dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (rtl_dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (rtl_dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (rtl_dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (rtl_dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (rtl_dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}
/* Output instructions as RTL to increment the edge execution count.  */

static rtx
gen_edge_profiler (int edgeno)
{
  rtx ref = coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  rtx tmp;
  enum machine_mode mode = GET_MODE (ref);
  rtx sequence;

  start_sequence ();
  ref = validize_mem (ref);

  tmp = expand_simple_binop (mode, PLUS, ref, const1_rtx,
                             ref, 0, OPTAB_WIDEN);

  if (tmp != ref)
    emit_move_insn (copy_rtx (ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  return sequence;
}
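/* Illustration only (not from the original sources): for a counter mode
   matching gcov_type, the sequence built above behaves like the C
   statement

       arc_counters[edgeno]++;

   where arc_counters stands for the hypothetical per-function arc
   counter array that coverage_counter_ref refers to, widened as
   necessary for the counter mode.  */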
/* Output instructions as RTL to increment the interval histogram counter.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is the offset of the counter position.  */
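/* Counter layout, as implied by the code below: counters BASE through
   BASE + steps - 1 record the values int_start through
   int_start + steps - 1; they are followed by one counter for values
   above the interval (if may_be_more) and one for values below it (if
   may_be_less).  */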
static rtx
gen_interval_profiler (struct histogram_value *value, unsigned tag,
                       unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx mem_ref, tmp, tmp1, mr, val;
  rtx sequence;
  rtx more_label = gen_label_rtx ();
  rtx less_label = gen_label_rtx ();
  rtx end_of_code_label = gen_label_rtx ();
  int per_counter = gcov_size / BITS_PER_UNIT;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  mr = gen_reg_rtx (Pmode);

  tmp = coverage_counter_ref (tag, base);
  tmp = force_reg (Pmode, XEXP (tmp, 0));

  val = expand_simple_binop (value->mode, MINUS,
                             copy_rtx (value->value),
                             GEN_INT (value->hdata.intvl.int_start),
                             NULL_RTX, 0, OPTAB_WIDEN);

  if (value->hdata.intvl.may_be_more)
    do_compare_rtx_and_jump (copy_rtx (val), GEN_INT (value->hdata.intvl.steps),
                             GE, 0, value->mode, NULL_RTX, NULL_RTX, more_label);
  if (value->hdata.intvl.may_be_less)
    do_compare_rtx_and_jump (copy_rtx (val), const0_rtx, LT, 0, value->mode,
                             NULL_RTX, NULL_RTX, less_label);

  /* We are in range.  */
  tmp1 = expand_simple_binop (value->mode, MULT,
                              copy_rtx (val), GEN_INT (per_counter),
                              NULL_RTX, 0, OPTAB_WIDEN);
  tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp), tmp1, mr,
                              0, OPTAB_WIDEN);
  if (tmp1 != mr)
    emit_move_insn (copy_rtx (mr), tmp1);

  if (value->hdata.intvl.may_be_more
      || value->hdata.intvl.may_be_less)
    {
      emit_jump_insn (gen_jump (end_of_code_label));
      emit_barrier ();
    }

  /* Above the interval.  */
  if (value->hdata.intvl.may_be_more)
    {
      emit_label (more_label);
      tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
                                  GEN_INT (per_counter * value->hdata.intvl.steps),
                                  mr, 0, OPTAB_WIDEN);
      if (tmp1 != mr)
        emit_move_insn (copy_rtx (mr), tmp1);
      if (value->hdata.intvl.may_be_less)
        {
          emit_jump_insn (gen_jump (end_of_code_label));
          emit_barrier ();
        }
    }

  /* Below the interval.  */
  if (value->hdata.intvl.may_be_less)
    {
      emit_label (less_label);
      tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
                                  GEN_INT (per_counter
                                           * (value->hdata.intvl.steps
                                              + (value->hdata.intvl.may_be_more
                                                 ? 1 : 0))),
                                  mr, 0, OPTAB_WIDEN);
      if (tmp1 != mr)
        emit_move_insn (copy_rtx (mr), tmp1);
    }

  if (value->hdata.intvl.may_be_more
      || value->hdata.intvl.may_be_less)
    emit_label (end_of_code_label);

  mem_ref = validize_mem (gen_rtx_MEM (mode, mr));

  tmp = expand_simple_binop (mode, PLUS, copy_rtx (mem_ref), const1_rtx,
                             mem_ref, 0, OPTAB_WIDEN);

  if (tmp != mem_ref)
    emit_move_insn (copy_rtx (mem_ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL to increment the power of two histogram counter.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is the offset of the counter position.  */
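/* Counter layout, as implied by the code below: when may_be_other is
   set, counter BASE collects the values that are not a positive power
   of two; a value equal to 2^k passes through the shift loop k + 1
   times and is therefore recorded in counter BASE + k + 1.  */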
static rtx
gen_pow2_profiler (struct histogram_value *value, unsigned tag, unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx mem_ref, tmp, mr, uval;
  rtx sequence;
  rtx end_of_code_label = gen_label_rtx ();
  rtx loop_label = gen_label_rtx ();
  int per_counter = gcov_size / BITS_PER_UNIT;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  mr = gen_reg_rtx (Pmode);
  tmp = coverage_counter_ref (tag, base);
  tmp = force_reg (Pmode, XEXP (tmp, 0));
  emit_move_insn (mr, tmp);

  uval = gen_reg_rtx (value->mode);
  emit_move_insn (uval, copy_rtx (value->value));

  /* Check for non-power of 2.  */
  if (value->hdata.pow2.may_be_other)
    {
      do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, LE, 0, value->mode,
                               NULL_RTX, NULL_RTX, end_of_code_label);
      tmp = expand_simple_binop (value->mode, PLUS, copy_rtx (uval),
                                 constm1_rtx, NULL_RTX, 0, OPTAB_WIDEN);
      tmp = expand_simple_binop (value->mode, AND, copy_rtx (uval), tmp,
                                 NULL_RTX, 0, OPTAB_WIDEN);
      do_compare_rtx_and_jump (tmp, const0_rtx, NE, 0, value->mode, NULL_RTX,
                               NULL_RTX, end_of_code_label);
    }

  /* Count log_2(value).  */
  emit_label (loop_label);

  tmp = expand_simple_binop (Pmode, PLUS, copy_rtx (mr),
                             GEN_INT (per_counter), mr, 0, OPTAB_WIDEN);
  if (tmp != mr)
    emit_move_insn (copy_rtx (mr), tmp);

  tmp = expand_simple_binop (value->mode, ASHIFTRT, copy_rtx (uval), const1_rtx,
                             uval, 0, OPTAB_WIDEN);
  if (tmp != uval)
    emit_move_insn (copy_rtx (uval), tmp);

  do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, NE, 0, value->mode,
                           NULL_RTX, NULL_RTX, loop_label);

  /* Increase the counter.  */
  emit_label (end_of_code_label);

  mem_ref = validize_mem (gen_rtx_MEM (mode, mr));

  tmp = expand_simple_binop (mode, PLUS, copy_rtx (mem_ref), const1_rtx,
                             mem_ref, 0, OPTAB_WIDEN);

  if (tmp != mem_ref)
    emit_move_insn (copy_rtx (mem_ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL for code to find the most common value.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is the offset of the counter position.  */
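/* The three counters used below form a majority-vote ("streak") scheme:
   counter BASE holds the current candidate value, BASE + 1 holds a streak
   count that is incremented on a match and decremented on a mismatch
   (a mismatch at streak zero installs a new candidate), and BASE + 2
   holds the total number of executions.  */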
static rtx
gen_one_value_profiler (struct histogram_value *value, unsigned tag,
                        unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx stored_value_ref, counter_ref, all_ref, stored_value, counter, all;
  rtx tmp, uval;
  rtx sequence;
  rtx same_label = gen_label_rtx ();
  rtx zero_label = gen_label_rtx ();
  rtx end_of_code_label = gen_label_rtx ();

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  stored_value_ref = coverage_counter_ref (tag, base);
  counter_ref = coverage_counter_ref (tag, base + 1);
  all_ref = coverage_counter_ref (tag, base + 2);
  stored_value = validize_mem (stored_value_ref);
  counter = validize_mem (counter_ref);
  all = validize_mem (all_ref);

  uval = gen_reg_rtx (mode);
  convert_move (uval, copy_rtx (value->value), 0);

  /* Check if the stored value matches.  */
  do_compare_rtx_and_jump (copy_rtx (uval), copy_rtx (stored_value), EQ,
                           0, mode, NULL_RTX, NULL_RTX, same_label);

  /* Does not match; check whether the counter is zero.  */
  do_compare_rtx_and_jump (copy_rtx (counter), const0_rtx, EQ, 0, mode,
                           NULL_RTX, NULL_RTX, zero_label);

  /* The counter is not zero yet.  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (counter), constm1_rtx,
                             counter, 0, OPTAB_WIDEN);

  if (tmp != counter)
    emit_move_insn (copy_rtx (counter), tmp);

  emit_jump_insn (gen_jump (end_of_code_label));
  emit_barrier ();

  emit_label (zero_label);
  /* Set new value.  */
  emit_move_insn (copy_rtx (stored_value), copy_rtx (uval));

  emit_label (same_label);
  /* Increase the counter.  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (counter), const1_rtx,
                             counter, 0, OPTAB_WIDEN);

  if (tmp != counter)
    emit_move_insn (copy_rtx (counter), tmp);

  emit_label (end_of_code_label);

  /* Increase the counter of all executions; this seems redundant given
     that we have counts for edges in cfg, but it may happen that some
     optimization will change the counts for the block (either because
     it is unable to update them correctly, or because it will duplicate
     the block or its part).  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (all), const1_rtx,
                             all, 0, OPTAB_WIDEN);

  if (tmp != all)
    emit_move_insn (copy_rtx (all), tmp);
  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL for code to find the most common value of
   a difference between two evaluations of an expression.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is the offset of the counter position.  */
static rtx
gen_const_delta_profiler (struct histogram_value *value, unsigned tag,
                          unsigned base)
{
  struct histogram_value one_value_delta;
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx stored_value_ref, stored_value, tmp, uval;
  rtx sequence;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  stored_value_ref = coverage_counter_ref (tag, base);
  stored_value = validize_mem (stored_value_ref);

  uval = gen_reg_rtx (mode);
  convert_move (uval, copy_rtx (value->value), 0);
  tmp = expand_simple_binop (mode, MINUS,
                             copy_rtx (uval), copy_rtx (stored_value),
                             NULL_RTX, 0, OPTAB_WIDEN);

  one_value_delta.value = tmp;
  one_value_delta.mode = mode;
  one_value_delta.seq = NULL_RTX;
  one_value_delta.insn = value->insn;
  one_value_delta.type = HIST_TYPE_SINGLE_VALUE;
  emit_insn (gen_one_value_profiler (&one_value_delta, tag, base + 1));

  emit_move_insn (copy_rtx (stored_value), uval);
  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}