[official-gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 minimal spanning tree of the BB graph; only edges that are not on the
37 spanning tree (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
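/* As an illustration of the scheme above (a sketch with a made-up CFG, not
   taken from any particular function): consider the diamond

        ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT.

   A spanning tree containing ENTRY->A, A->B, A->C, B->D and D->EXIT leaves
   only C->D off the tree, so a single counter on C->D, together with the
   function entry count, is enough; every tree edge's count then follows
   from flow conservation at each block, e.g. count(A->C) = count(C->D)
   and count(B->D) = count(A) - count(A->C). */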
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
72 /* Hooks for profiling. */
73 static struct profile_hooks* profile_hooks;
75 /* File for profiling debug output. */
76 static inline FILE*
77 profile_dump_file (void) {
78 return profile_hooks->profile_dump_file ();
81 /* Additional information about the edges we need. */
82 struct edge_info {
83 unsigned int count_valid : 1;
85 /* Is on the spanning tree. */
86 unsigned int on_tree : 1;
88 /* Pretend this edge does not exist (it is abnormal and we've
89 inserted a fake to compensate). */
90 unsigned int ignore : 1;
93 struct bb_info {
94 unsigned int count_valid : 1;
96 /* Number of successor and predecessor edges. */
97 gcov_type succ_count;
98 gcov_type pred_count;
101 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
102 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
104 /* Counter summary from the last set of coverage counts read. */
106 const struct gcov_ctr_summary *profile_info;
108 /* Collect statistics on the performance of this pass for the entire source
109 file. */
111 static int total_num_blocks;
112 static int total_num_edges;
113 static int total_num_edges_ignored;
114 static int total_num_edges_instrumented;
115 static int total_num_blocks_created;
116 static int total_num_passes;
117 static int total_num_times_called;
118 static int total_hist_br_prob[20];
119 static int total_num_never_executed;
120 static int total_num_branches;
122 /* Forward declarations. */
123 static void find_spanning_tree (struct edge_list *);
124 static unsigned instrument_edges (struct edge_list *);
125 static void instrument_values (histogram_values);
126 static void compute_branch_probabilities (void);
127 static void compute_value_histograms (histogram_values);
128 static gcov_type * get_exec_counts (void);
129 static basic_block find_group (basic_block);
130 static void union_groups (basic_block, basic_block);
133 /* Add edge instrumentation code to the entire insn chain.
135 EL is the edge list of the function's CFG; a counter is added for each
136 edge that is neither ignored nor on the spanning tree. */
138 static unsigned
139 instrument_edges (struct edge_list *el)
141 unsigned num_instr_edges = 0;
142 int num_edges = NUM_EDGES (el);
143 basic_block bb;
145 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
147 edge e;
148 edge_iterator ei;
150 FOR_EACH_EDGE (e, ei, bb->succs)
152 struct edge_info *inf = EDGE_INFO (e);
154 if (!inf->ignore && !inf->on_tree)
156 gcc_assert (!(e->flags & EDGE_ABNORMAL));
157 if (dump_file)
158 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
159 e->src->index, e->dest->index,
160 EDGE_CRITICAL_P (e) ? " (and split)" : "");
161 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
166 total_num_blocks_created += num_edges;
167 if (dump_file)
168 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
169 return num_instr_edges;
172 /* Add code to measure histograms for values in list VALUES. */
173 static void
174 instrument_values (histogram_values values)
176 unsigned i, t;
178 /* Emit code to generate the histograms before the insns. */
180 for (i = 0; i < VEC_length (histogram_value, values); i++)
182 histogram_value hist = VEC_index (histogram_value, values, i);
183 switch (hist->type)
185 case HIST_TYPE_INTERVAL:
186 t = GCOV_COUNTER_V_INTERVAL;
187 break;
189 case HIST_TYPE_POW2:
190 t = GCOV_COUNTER_V_POW2;
191 break;
193 case HIST_TYPE_SINGLE_VALUE:
194 t = GCOV_COUNTER_V_SINGLE;
195 break;
197 case HIST_TYPE_CONST_DELTA:
198 t = GCOV_COUNTER_V_DELTA;
199 break;
201 default:
202 gcc_unreachable ();
204 if (!coverage_counter_alloc (t, hist->n_counters))
205 continue;
207 switch (hist->type)
209 case HIST_TYPE_INTERVAL:
210 (profile_hooks->gen_interval_profiler) (hist, t, 0);
211 break;
213 case HIST_TYPE_POW2:
214 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
215 break;
217 case HIST_TYPE_SINGLE_VALUE:
218 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
219 break;
221 case HIST_TYPE_CONST_DELTA:
222 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
223 break;
225 default:
226 gcc_unreachable ();
229 VEC_free (histogram_value, heap, values);
233 /* Read from the da_file the execution counts matching the edges to be instrumented in the current function. */
235 static gcov_type *
236 get_exec_counts (void)
238 unsigned num_edges = 0;
239 basic_block bb;
240 gcov_type *counts;
242 /* Count the edges to be (possibly) instrumented. */
243 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
245 edge e;
246 edge_iterator ei;
248 FOR_EACH_EDGE (e, ei, bb->succs)
249 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
250 num_edges++;
253 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
254 if (!counts)
255 return NULL;
257 if (dump_file && profile_info)
258 fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
259 profile_info->runs, (unsigned) profile_info->sum_max);
261 return counts;
265 /* Compute the branch probabilities for the various branches.
266 Annotate them accordingly. */
268 static void
269 compute_branch_probabilities (void)
271 basic_block bb;
272 int i;
273 int num_edges = 0;
274 int changes;
275 int passes;
276 int hist_br_prob[20];
277 int num_never_executed;
278 int num_branches;
279 gcov_type *exec_counts = get_exec_counts ();
280 int exec_counts_pos = 0;
282 /* Very simple sanity checks so we catch bugs in our profiling code. */
283 if (profile_info)
285 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
287 error ("corrupted profile info: run_max * runs < sum_max");
288 exec_counts = NULL;
291 if (profile_info->sum_all < profile_info->sum_max)
293 error ("corrupted profile info: sum_all is smaller than sum_max");
294 exec_counts = NULL;
298 /* Attach extra info block to each bb. */
300 alloc_aux_for_blocks (sizeof (struct bb_info));
301 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
303 edge e;
304 edge_iterator ei;
306 FOR_EACH_EDGE (e, ei, bb->succs)
307 if (!EDGE_INFO (e)->ignore)
308 BB_INFO (bb)->succ_count++;
309 FOR_EACH_EDGE (e, ei, bb->preds)
310 if (!EDGE_INFO (e)->ignore)
311 BB_INFO (bb)->pred_count++;
314 /* Avoid predicting entry on exit nodes. */
315 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
316 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
318 /* For each edge not on the spanning tree, set its execution count from
319 the .da file. */
321 /* The first count in the .da file is the number of times that the function
322 was entered. This is the exec_count for block zero. */
324 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
326 edge e;
327 edge_iterator ei;
329 FOR_EACH_EDGE (e, ei, bb->succs)
330 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
332 num_edges++;
333 if (exec_counts)
335 e->count = exec_counts[exec_counts_pos++];
336 if (e->count > profile_info->sum_max)
338 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
339 bb->index, e->dest->index);
342 else
343 e->count = 0;
345 EDGE_INFO (e)->count_valid = 1;
346 BB_INFO (bb)->succ_count--;
347 BB_INFO (e->dest)->pred_count--;
348 if (dump_file)
350 fprintf (dump_file, "\nRead edge from %i to %i, count:",
351 bb->index, e->dest->index);
352 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
353 (HOST_WIDEST_INT) e->count);
358 if (dump_file)
359 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
361 /* For every block in the file,
362 - if every exit/entrance edge has a known count, then set the block count
363 - if the block count is known, and every exit/entrance edge but one has
364 a known execution count, then set the count of the remaining edge
366 As edge counts are set, decrement the succ/pred count, but don't delete
367 the edge; that way we can easily tell when all edges are known, or when only
368 one edge is unknown. */
370 /* The order that the basic blocks are iterated through is important.
371 Since the code that finds spanning trees starts with block 0, low numbered
372 edges are put on the spanning tree in preference to high numbered edges.
373 Hence, most instrumented edges are at the end. Graph solving works much
374 faster if we propagate numbers from the end to the start.
376 This takes an average of slightly more than 3 passes. */
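/* A worked instance of the rules above (made-up numbers): if a block is
   known to execute 10 times and two of its three outgoing edges have known
   counts 7 and 1, the remaining edge must have count 10 - 7 - 1 = 2;
   symmetrically, once all incoming (or outgoing) edge counts of a block
   are known, their sum becomes the block count. Each such deduction can
   enable further ones, hence the iteration below until nothing changes. */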
378 changes = 1;
379 passes = 0;
380 while (changes)
382 passes++;
383 changes = 0;
384 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
386 struct bb_info *bi = BB_INFO (bb);
387 if (! bi->count_valid)
389 if (bi->succ_count == 0)
391 edge e;
392 edge_iterator ei;
393 gcov_type total = 0;
395 FOR_EACH_EDGE (e, ei, bb->succs)
396 total += e->count;
397 bb->count = total;
398 bi->count_valid = 1;
399 changes = 1;
401 else if (bi->pred_count == 0)
403 edge e;
404 edge_iterator ei;
405 gcov_type total = 0;
407 FOR_EACH_EDGE (e, ei, bb->preds)
408 total += e->count;
409 bb->count = total;
410 bi->count_valid = 1;
411 changes = 1;
414 if (bi->count_valid)
416 if (bi->succ_count == 1)
418 edge e;
419 edge_iterator ei;
420 gcov_type total = 0;
422 /* One of the counts will be invalid, but it is zero,
423 so adding it in also doesn't hurt. */
424 FOR_EACH_EDGE (e, ei, bb->succs)
425 total += e->count;
427 /* Search for the invalid edge, and set its count. */
428 FOR_EACH_EDGE (e, ei, bb->succs)
429 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
430 break;
432 /* Calculate count for remaining edge by conservation. */
433 total = bb->count - total;
435 gcc_assert (e);
436 EDGE_INFO (e)->count_valid = 1;
437 e->count = total;
438 bi->succ_count--;
440 BB_INFO (e->dest)->pred_count--;
441 changes = 1;
443 if (bi->pred_count == 1)
445 edge e;
446 edge_iterator ei;
447 gcov_type total = 0;
449 /* One of the counts will be invalid, but it is zero,
450 so adding it in also doesn't hurt. */
451 FOR_EACH_EDGE (e, ei, bb->preds)
452 total += e->count;
454 /* Search for the invalid edge, and set its count. */
455 FOR_EACH_EDGE (e, ei, bb->preds)
456 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
457 break;
459 /* Calculate count for remaining edge by conservation. */
460 total = bb->count - total + e->count;
462 gcc_assert (e);
463 EDGE_INFO (e)->count_valid = 1;
464 e->count = total;
465 bi->pred_count--;
467 BB_INFO (e->src)->succ_count--;
468 changes = 1;
473 if (dump_file)
474 dump_flow_info (dump_file);
476 total_num_passes += passes;
477 if (dump_file)
478 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
480 /* If the graph has been correctly solved, every block will have a
481 succ and pred count of zero. */
482 FOR_EACH_BB (bb)
484 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
487 /* For every edge, calculate its branch probability and add a reg_note
488 to the branch insn to indicate this. */
490 for (i = 0; i < 20; i++)
491 hist_br_prob[i] = 0;
492 num_never_executed = 0;
493 num_branches = 0;
495 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
497 edge e;
498 edge_iterator ei;
499 rtx note;
501 if (bb->count < 0)
503 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
504 bb->index, (int)bb->count);
505 bb->count = 0;
507 FOR_EACH_EDGE (e, ei, bb->succs)
509 /* A function may return twice in the case the called function is
510 setjmp or calls fork, but we can't represent this by an extra
511 edge from the entry, since an extra edge from the exit is
512 already present. We get a negative frequency from the entry
513 point. */
514 if ((e->count < 0
515 && e->dest == EXIT_BLOCK_PTR)
516 || (e->count > bb->count
517 && e->dest != EXIT_BLOCK_PTR))
519 if (block_ends_with_call_p (bb))
520 e->count = e->count < 0 ? 0 : bb->count;
522 if (e->count < 0 || e->count > bb->count)
524 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
525 e->src->index, e->dest->index,
526 (int)e->count);
527 e->count = bb->count / 2;
530 if (bb->count)
532 FOR_EACH_EDGE (e, ei, bb->succs)
533 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
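/* The expression above is e->count / bb->count scaled to REG_BR_PROB_BASE
   and rounded to the nearest integer. For example, assuming
   REG_BR_PROB_BASE is 10000, an edge taken 3 times out of 7 executions
   of the block gets probability (3 * 10000 + 3) / 7 == 4286. */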
534 if (bb->index >= 0
535 && block_ends_with_condjump_p (bb)
536 && EDGE_COUNT (bb->succs) >= 2)
538 int prob;
539 edge e;
540 int index;
542 /* Find the branch edge. It is possible that we do have fake
543 edges here. */
544 FOR_EACH_EDGE (e, ei, bb->succs)
545 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
546 break;
548 prob = e->probability;
549 index = prob * 20 / REG_BR_PROB_BASE;
551 if (index == 20)
552 index = 19;
553 hist_br_prob[index]++;
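/* The index computed above buckets the probability into one of twenty
   5%-wide ranges; a probability of exactly REG_BR_PROB_BASE would give
   index 20, so it is folded into the last bucket. These buckets feed the
   per-function histogram dumped below and the file-wide totals printed
   by end_branch_prob. */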
555 /* Do this for RTL only. */
556 if (!ir_type ())
558 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
559 /* There may already be a note put there by some other pass, such
560 as the builtin_expect expander. */
561 if (note)
562 XEXP (note, 0) = GEN_INT (prob);
563 else
564 REG_NOTES (BB_END (bb))
565 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
566 REG_NOTES (BB_END (bb)));
568 num_branches++;
571 /* Otherwise try to preserve the existing REG_BR_PROB probabilities
572 that tree-based profile guessing put into the code. BB can be the
573 ENTRY_BLOCK, and it can have multiple (fake) successors in
574 EH cases, but it still has no code; don't crash in this case. */
575 else if (profile_status == PROFILE_ABSENT
576 && !ir_type ()
577 && EDGE_COUNT (bb->succs) > 1
578 && BB_END (bb)
579 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
581 int prob = INTVAL (XEXP (note, 0));
583 BRANCH_EDGE (bb)->probability = prob;
584 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
586 /* As a last resort, distribute the probabilities evenly.
587 Use the simple heuristic that if there are normal edges,
588 all abnormal edges get a frequency of 0; otherwise distribute the
589 frequency over the abnormal edges (this is the case of noreturn
590 calls). */
591 else if (profile_status == PROFILE_ABSENT)
593 int total = 0;
595 FOR_EACH_EDGE (e, ei, bb->succs)
596 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
597 total ++;
598 if (total)
600 FOR_EACH_EDGE (e, ei, bb->succs)
601 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
602 e->probability = REG_BR_PROB_BASE / total;
603 else
604 e->probability = 0;
606 else
608 total += EDGE_COUNT (bb->succs);
609 FOR_EACH_EDGE (e, ei, bb->succs)
610 e->probability = REG_BR_PROB_BASE / total;
612 if (bb->index >= 0
613 && block_ends_with_condjump_p (bb)
614 && EDGE_COUNT (bb->succs) >= 2)
615 num_branches++, num_never_executed++;
618 counts_to_freqs ();
620 if (dump_file)
622 fprintf (dump_file, "%d branches\n", num_branches);
623 fprintf (dump_file, "%d branches never executed\n",
624 num_never_executed);
625 if (num_branches)
626 for (i = 0; i < 10; i++)
627 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
628 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
629 5 * i, 5 * i + 5);
631 total_num_branches += num_branches;
632 total_num_never_executed += num_never_executed;
633 for (i = 0; i < 20; i++)
634 total_hist_br_prob[i] += hist_br_prob[i];
636 fputc ('\n', dump_file);
637 fputc ('\n', dump_file);
640 free_aux_for_blocks ();
643 /* Load the value histograms whose descriptions are stored in the VALUES
644 array from the .gcda file. */
646 static void
647 compute_value_histograms (histogram_values values)
649 unsigned i, j, t, any;
650 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
651 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
652 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
653 gcov_type *aact_count;
654 histogram_value hist;
656 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
657 n_histogram_counters[t] = 0;
659 for (i = 0; i < VEC_length (histogram_value, values); i++)
661 hist = VEC_index (histogram_value, values, i);
662 n_histogram_counters[(int) hist->type] += hist->n_counters;
665 any = 0;
666 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
668 if (!n_histogram_counters[t])
670 histogram_counts[t] = NULL;
671 continue;
674 histogram_counts[t] =
675 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
676 n_histogram_counters[t], NULL);
677 if (histogram_counts[t])
678 any = 1;
679 act_count[t] = histogram_counts[t];
681 if (!any)
682 return;
684 for (i = 0; i < VEC_length (histogram_value, values); i++)
686 rtx hist_list = NULL_RTX;
688 hist = VEC_index (histogram_value, values, i);
689 t = (int) hist->type;
691 aact_count = act_count[t];
692 act_count[t] += hist->n_counters;
694 if (!ir_type ())
696 for (j = hist->n_counters; j > 0; j--)
697 hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
698 hist_list);
699 hist_list = alloc_EXPR_LIST (0,
700 copy_rtx (hist->hvalue.rtl.value), hist_list);
701 hist_list = alloc_EXPR_LIST (0, GEN_INT (hist->type), hist_list);
702 REG_NOTES (hist->hvalue.rtl.insn) =
703 alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
704 REG_NOTES (hist->hvalue.rtl.insn));
706 else
708 tree stmt = hist->hvalue.tree.stmt;
709 stmt_ann_t ann = get_stmt_ann (stmt);
710 hist->hvalue.tree.next = ann->histograms;
711 ann->histograms = hist;
712 hist->hvalue.tree.counters =
713 xmalloc (sizeof (gcov_type) * hist->n_counters);
714 for (j = 0; j < hist->n_counters; j++)
715 hist->hvalue.tree.counters[j] = aact_count[j];
719 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
720 if (histogram_counts[t])
721 free (histogram_counts[t]);
724 #define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
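/* With ENTRY_BLOCK_PTR->index temporarily set to -1 and
   EXIT_BLOCK_PTR->index to last_basic_block (see branch_prob below), this
   maps the entry block to gcov index 0, each ordinary block to its index
   plus one, and the exit block to the last index. */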
725 /* When passed NULL as file_name, initialize.
726 When passed something else, output the necessary commands to change
727 the current line to LINE and the current file to FILE_NAME. */
728 static void
729 output_location (char const *file_name, int line,
730 gcov_position_t *offset, basic_block bb)
732 static char const *prev_file_name;
733 static int prev_line;
734 bool name_differs, line_differs;
736 if (!file_name)
738 prev_file_name = NULL;
739 prev_line = -1;
740 return;
743 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
744 line_differs = prev_line != line;
746 if (name_differs || line_differs)
748 if (!*offset)
750 *offset = gcov_write_tag (GCOV_TAG_LINES);
751 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
752 name_differs = line_differs = true;
755 /* If this is a new source file, then output the
756 file's name to the .bb file. */
757 if (name_differs)
759 prev_file_name = file_name;
760 gcov_write_unsigned (0);
761 gcov_write_string (prev_file_name);
763 if (line_differs)
765 gcov_write_unsigned (line);
766 prev_line = line;
771 /* Instrument and/or analyze program behavior based on program flow graph.
772 In either case, this function builds a flow graph for the function being
773 compiled. The flow graph is stored in BB_GRAPH.
775 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
776 the flow graph that are needed to reconstruct the dynamic behavior of the
777 flow graph.
779 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
780 information from a data file containing edge count information from previous
781 executions of the function being compiled. In this case, the flow graph is
782 annotated with actual execution counts, which are later propagated into the
783 rtl for optimization purposes.
785 Main entry point of this file. */
787 void
788 branch_prob (void)
790 basic_block bb;
791 unsigned i;
792 unsigned num_edges, ignored_edges;
793 unsigned num_instrumented;
794 struct edge_list *el;
795 histogram_values values = NULL;
797 total_num_times_called++;
799 flow_call_edges_add (NULL);
800 add_noreturn_fake_exit_edges ();
802 /* We can't handle cyclic regions constructed using abnormal edges.
803 To avoid these we replace every source of an abnormal edge by a fake
804 edge from the entry node, and every destination by a fake edge to the exit.
805 This keeps the graph acyclic and our calculation exact for all normal
806 edges except for the exit and entrance ones.
808 We also add fake exit edges for each call and asm statement in the
809 basic block, since it may not return. */
811 FOR_EACH_BB (bb)
813 int need_exit_edge = 0, need_entry_edge = 0;
814 int have_exit_edge = 0, have_entry_edge = 0;
815 edge e;
816 edge_iterator ei;
818 /* Functions returning multiple times are not handled by extra edges.
819 Instead we simply allow negative counts on the edges from the exit to the
820 block past the call, and the corresponding probabilities. We can't go
821 with the extra edges because that would result in a flowgraph that
822 needs to have fake edges outside the spanning tree. */
824 FOR_EACH_EDGE (e, ei, bb->succs)
826 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
827 && e->dest != EXIT_BLOCK_PTR)
828 need_exit_edge = 1;
829 if (e->dest == EXIT_BLOCK_PTR)
830 have_exit_edge = 1;
832 FOR_EACH_EDGE (e, ei, bb->preds)
834 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
835 && e->src != ENTRY_BLOCK_PTR)
836 need_entry_edge = 1;
837 if (e->src == ENTRY_BLOCK_PTR)
838 have_entry_edge = 1;
841 if (need_exit_edge && !have_exit_edge)
843 if (dump_file)
844 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
845 bb->index);
846 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
848 if (need_entry_edge && !have_entry_edge)
850 if (dump_file)
851 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
852 bb->index);
853 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
857 el = create_edge_list ();
858 num_edges = NUM_EDGES (el);
859 alloc_aux_for_edges (sizeof (struct edge_info));
861 /* The basic blocks are expected to be numbered sequentially. */
862 compact_blocks ();
864 ignored_edges = 0;
865 for (i = 0 ; i < num_edges ; i++)
867 edge e = INDEX_EDGE (el, i);
868 e->count = 0;
870 /* Mark edges we've replaced by fake edges above as ignored. */
871 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
872 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
874 EDGE_INFO (e)->ignore = 1;
875 ignored_edges++;
879 /* Create spanning tree from basic block graph, mark each edge that is
880 on the spanning tree. We insert as many abnormal and critical edges
881 as possible to minimize number of edge splits necessary. */
883 find_spanning_tree (el);
885 /* Fake edges that are not on the tree will not be instrumented, so
886 mark them ignored. */
887 for (num_instrumented = i = 0; i < num_edges; i++)
889 edge e = INDEX_EDGE (el, i);
890 struct edge_info *inf = EDGE_INFO (e);
892 if (inf->ignore || inf->on_tree)
893 /*NOP*/;
894 else if (e->flags & EDGE_FAKE)
896 inf->ignore = 1;
897 ignored_edges++;
899 else
900 num_instrumented++;
903 total_num_blocks += n_basic_blocks + 2;
904 if (dump_file)
905 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
907 total_num_edges += num_edges;
908 if (dump_file)
909 fprintf (dump_file, "%d edges\n", num_edges);
911 total_num_edges_ignored += ignored_edges;
912 if (dump_file)
913 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
915 /* Write the data from which gcov can reconstruct the basic block
916 graph. */
918 /* Basic block flags */
919 if (coverage_begin_output ())
921 gcov_position_t offset;
923 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
924 for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
925 gcov_write_unsigned (0);
926 gcov_write_length (offset);
929 /* Keep all basic block indexes nonnegative in the gcov output.
930 Index 0 is used for entry block, last index is for exit block. */
932 ENTRY_BLOCK_PTR->index = -1;
933 EXIT_BLOCK_PTR->index = last_basic_block;
935 /* Arcs */
936 if (coverage_begin_output ())
938 gcov_position_t offset;
940 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
942 edge e;
943 edge_iterator ei;
945 offset = gcov_write_tag (GCOV_TAG_ARCS);
946 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
948 FOR_EACH_EDGE (e, ei, bb->succs)
950 struct edge_info *i = EDGE_INFO (e);
951 if (!i->ignore)
953 unsigned flag_bits = 0;
955 if (i->on_tree)
956 flag_bits |= GCOV_ARC_ON_TREE;
957 if (e->flags & EDGE_FAKE)
958 flag_bits |= GCOV_ARC_FAKE;
959 if (e->flags & EDGE_FALLTHRU)
960 flag_bits |= GCOV_ARC_FALLTHROUGH;
961 /* On trees we don't have fallthru flags, but we can
962 recompute them from CFG shape. */
963 if (ir_type ()
964 && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
965 && e->src->next_bb == e->dest)
966 flag_bits |= GCOV_ARC_FALLTHROUGH;
968 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
969 gcov_write_unsigned (flag_bits);
973 gcov_write_length (offset);
977 /* Line numbers. */
978 if (coverage_begin_output ())
980 /* Initialize the output. */
981 output_location (NULL, 0, NULL, NULL);
983 if (!ir_type ())
985 gcov_position_t offset;
987 FOR_EACH_BB (bb)
989 rtx insn = BB_HEAD (bb);
990 int ignore_next_note = 0;
992 offset = 0;
994 /* We are looking for line number notes. Search backward
995 before the basic block to find the correct ones. */
996 insn = prev_nonnote_insn (insn);
997 if (!insn)
998 insn = get_insns ();
999 else
1000 insn = NEXT_INSN (insn);
1002 while (insn != BB_END (bb))
1004 if (NOTE_P (insn))
1006 /* Must ignore the line number notes that
1007 immediately follow the end of an inline function
1008 to avoid counting it twice. There is a note
1009 before the call, and one after the call. */
1010 if (NOTE_LINE_NUMBER (insn)
1011 == NOTE_INSN_REPEATED_LINE_NUMBER)
1012 ignore_next_note = 1;
1013 else if (NOTE_LINE_NUMBER (insn) <= 0)
1014 /*NOP*/;
1015 else if (ignore_next_note)
1016 ignore_next_note = 0;
1017 else
1019 expanded_location s;
1020 NOTE_EXPANDED_LOCATION (s, insn);
1021 output_location (s.file, s.line, &offset, bb);
1024 insn = NEXT_INSN (insn);
1027 if (offset)
1029 /* A file of NULL indicates the end of run. */
1030 gcov_write_unsigned (0);
1031 gcov_write_string (NULL);
1032 gcov_write_length (offset);
1036 else
1038 gcov_position_t offset;
1040 FOR_EACH_BB (bb)
1042 block_stmt_iterator bsi;
1044 offset = 0;
1046 if (bb == ENTRY_BLOCK_PTR->next_bb)
1048 expanded_location curr_location =
1049 expand_location (DECL_SOURCE_LOCATION
1050 (current_function_decl));
1051 output_location (curr_location.file, curr_location.line,
1052 &offset, bb);
1055 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1057 tree stmt = bsi_stmt (bsi);
1058 if (EXPR_HAS_LOCATION (stmt))
1059 output_location (EXPR_FILENAME (stmt),
1060 EXPR_LINENO (stmt),
1061 &offset, bb);
1064 /* Notice GOTO expressions we eliminated while constructing the
1065 CFG. */
1066 if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
1068 /* ??? source_locus type is marked deprecated in input.h. */
1069 source_locus curr_location = single_succ_edge (bb)->goto_locus;
1070 /* ??? The FILE/LINE API is inconsistent for these cases. */
1071 #ifdef USE_MAPPED_LOCATION
1072 output_location (LOCATION_FILE (curr_location),
1073 LOCATION_LINE (curr_location),
1074 &offset, bb);
1075 #else
1076 output_location (curr_location->file, curr_location->line,
1077 &offset, bb);
1078 #endif
1081 if (offset)
1083 /* A file of NULL indicates the end of run. */
1084 gcov_write_unsigned (0);
1085 gcov_write_string (NULL);
1086 gcov_write_length (offset);
1092 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
1093 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
1094 #undef BB_TO_GCOV_INDEX
1096 if (flag_profile_values)
1097 find_values_to_profile (&values);
1099 if (flag_branch_probabilities)
1101 compute_branch_probabilities ();
1102 if (flag_profile_values)
1103 compute_value_histograms (values);
1106 remove_fake_edges ();
1108 /* For each edge not on the spanning tree, add counting code. */
1109 if (profile_arc_flag
1110 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1112 unsigned n_instrumented;
1114 profile_hooks->init_edge_profiler ();
1116 n_instrumented = instrument_edges (el);
1118 gcc_assert (n_instrumented == num_instrumented);
1120 if (flag_profile_values)
1121 instrument_values (values);
1123 /* Commit changes done by instrumentation. */
1124 if (ir_type ())
1125 bsi_commit_edge_inserts ();
1126 else
1128 commit_edge_insertions_watch_calls ();
1129 allocate_reg_info (max_reg_num (), FALSE, FALSE);
1133 free_aux_for_edges ();
1135 if (!ir_type ())
1137 /* Re-merge split basic blocks and the mess introduced by
1138 insert_insn_on_edge. */
1139 cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
1140 if (profile_dump_file())
1141 dump_flow_info (profile_dump_file());
1144 free_edge_list (el);
1145 if (flag_branch_probabilities)
1146 profile_status = PROFILE_READ;
1147 coverage_end_function ();
1150 /* Union find algorithm implementation for the basic blocks using
1151 aux fields. */
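/* A small path-compression example (hypothetical aux chain): if the aux
   pointers form A -> B -> C -> C, then C is the group representative;
   after find_group (A), both A and B point directly at C, so later
   lookups take a single step. */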
1153 static basic_block
1154 find_group (basic_block bb)
1156 basic_block group = bb, bb1;
1158 while ((basic_block) group->aux != group)
1159 group = (basic_block) group->aux;
1161 /* Compress path. */
1162 while ((basic_block) bb->aux != group)
1164 bb1 = (basic_block) bb->aux;
1165 bb->aux = (void *) group;
1166 bb = bb1;
1168 return group;
1171 static void
1172 union_groups (basic_block bb1, basic_block bb2)
1174 basic_block bb1g = find_group (bb1);
1175 basic_block bb2g = find_group (bb2);
1177 /* ??? I don't have a place for the rank field. OK, let's go without it;
1178 this code is unlikely to be a performance problem anyway. */
1179 gcc_assert (bb1g != bb2g);
1181 bb1g->aux = bb2g;
1184 /* This function searches all of the edges in the program flow graph, and puts
1185 as many bad edges as possible onto the spanning tree. Bad edges include
1186 abnormal edges, which can't be instrumented at the moment. Since it is
1187 possible for fake edges to form a cycle, we will have to develop some
1188 better way in the future. Critical edges are also put on the tree, since they
1189 are more expensive to instrument. */
1191 static void
1192 find_spanning_tree (struct edge_list *el)
1194 int i;
1195 int num_edges = NUM_EDGES (el);
1196 basic_block bb;
1198 /* We use aux field for standard union-find algorithm. */
1199 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1200 bb->aux = bb;
1202 /* Add the fake edge from exit to entry, which we can't instrument. */
1203 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1205 /* First add all abnormal edges to the tree unless they form a cycle. Also
1206 add all edges to EXIT_BLOCK_PTR, to avoid inserting profiling code behind
1207 the code that sets the return value of the function. */
1208 for (i = 0; i < num_edges; i++)
1210 edge e = INDEX_EDGE (el, i);
1211 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1212 || e->dest == EXIT_BLOCK_PTR)
1213 && !EDGE_INFO (e)->ignore
1214 && (find_group (e->src) != find_group (e->dest)))
1216 if (dump_file)
1217 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1218 e->src->index, e->dest->index);
1219 EDGE_INFO (e)->on_tree = 1;
1220 union_groups (e->src, e->dest);
1224 /* Now insert all critical edges to the tree unless they form a cycle. */
1225 for (i = 0; i < num_edges; i++)
1227 edge e = INDEX_EDGE (el, i);
1228 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1229 && find_group (e->src) != find_group (e->dest))
1231 if (dump_file)
1232 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1233 e->src->index, e->dest->index);
1234 EDGE_INFO (e)->on_tree = 1;
1235 union_groups (e->src, e->dest);
1239 /* And now the rest. */
1240 for (i = 0; i < num_edges; i++)
1242 edge e = INDEX_EDGE (el, i);
1243 if (!EDGE_INFO (e)->ignore
1244 && find_group (e->src) != find_group (e->dest))
1246 if (dump_file)
1247 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1248 e->src->index, e->dest->index);
1249 EDGE_INFO (e)->on_tree = 1;
1250 union_groups (e->src, e->dest);
1254 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1255 bb->aux = NULL;
1258 /* Perform file-level initialization for branch-prob processing. */
1260 void
1261 init_branch_prob (void)
1263 int i;
1265 total_num_blocks = 0;
1266 total_num_edges = 0;
1267 total_num_edges_ignored = 0;
1268 total_num_edges_instrumented = 0;
1269 total_num_blocks_created = 0;
1270 total_num_passes = 0;
1271 total_num_times_called = 0;
1272 total_num_branches = 0;
1273 total_num_never_executed = 0;
1274 for (i = 0; i < 20; i++)
1275 total_hist_br_prob[i] = 0;
1278 /* Performs file-level cleanup after branch-prob processing
1279 is completed. */
1281 void
1282 end_branch_prob (void)
1284 if (dump_file)
1286 fprintf (dump_file, "\n");
1287 fprintf (dump_file, "Total number of blocks: %d\n",
1288 total_num_blocks);
1289 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1290 fprintf (dump_file, "Total number of ignored edges: %d\n",
1291 total_num_edges_ignored);
1292 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1293 total_num_edges_instrumented);
1294 fprintf (dump_file, "Total number of blocks created: %d\n",
1295 total_num_blocks_created);
1296 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1297 total_num_passes);
1298 if (total_num_times_called != 0)
1299 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1300 (total_num_passes + (total_num_times_called >> 1))
1301 / total_num_times_called);
1302 fprintf (dump_file, "Total number of branches: %d\n",
1303 total_num_branches);
1304 fprintf (dump_file, "Total number of branches never executed: %d\n",
1305 total_num_never_executed);
1306 if (total_num_branches)
1308 int i;
1310 for (i = 0; i < 10; i++)
1311 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1312 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1313 / total_num_branches, 5*i, 5*i+5);
1318 /* Set up hooks to enable tree-based profiling. */
1320 void
1321 tree_register_profile_hooks (void)
1323 gcc_assert (ir_type ());
1324 profile_hooks = &tree_profile_hooks;
1327 /* Set up hooks to enable RTL-based profiling. */
1329 void
1330 rtl_register_profile_hooks (void)
1332 gcc_assert (!ir_type ());
1333 profile_hooks = &rtl_profile_hooks;
1336 static bool
1337 gate_handle_profiling (void)
1339 return optimize > 0
1340 || (!flag_tree_based_profiling
1341 && (profile_arc_flag || flag_test_coverage
1342 || flag_branch_probabilities));
1345 struct tree_opt_pass pass_profiling =
1347 NULL, /* name */
1348 gate_handle_profiling, /* gate */
1349 NULL, /* execute */
1350 NULL, /* sub */
1351 NULL, /* next */
1352 0, /* static_pass_number */
1353 0, /* tv_id */
1354 0, /* properties_required */
1355 0, /* properties_provided */
1356 0, /* properties_destroyed */
1357 0, /* todo_flags_start */
1358 0, /* todo_flags_finish */
1359 0 /* letter */
1363 /* Do branch profiling and static profile estimation passes. */
1364 static void
1365 rest_of_handle_branch_prob (void)
1367 struct loops loops;
1369 rtl_register_profile_hooks ();
1370 rtl_register_value_prof_hooks ();
1372 if ((profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1373 && !flag_tree_based_profiling)
1374 branch_prob ();
1376 /* Discover and record the loop depth at the head of each basic
1377 block. The loop infrastructure does the real job for us. */
1378 flow_loops_find (&loops);
1380 if (dump_file)
1381 flow_loops_dump (&loops, dump_file, NULL, 0);
1383 /* Estimate using heuristics if no profiling info is available. */
1384 if (flag_guess_branch_prob
1385 && (profile_status == PROFILE_ABSENT
1386 || (profile_status == PROFILE_READ && !flag_tree_based_profiling)))
1387 estimate_probability (&loops);
1389 flow_loops_free (&loops);
1390 free_dominance_info (CDI_DOMINATORS);
1393 struct tree_opt_pass pass_branch_prob =
1395 "bp", /* name */
1396 NULL, /* gate */
1397 rest_of_handle_branch_prob, /* execute */
1398 NULL, /* sub */
1399 NULL, /* next */
1400 0, /* static_pass_number */
1401 TV_BRANCH_PROB, /* tv_id */
1402 0, /* properties_required */
1403 0, /* properties_provided */
1404 0, /* properties_destroyed */
1405 0, /* todo_flags_start */
1406 TODO_dump_func, /* todo_flags_finish */
1407 'b' /* letter */