1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36    minimal spanning tree of the BB graph; only edges that are not on the
37    spanning tree (plus the entry point) need instrumenting.  From that
38    information all other edge counts can be deduced.  By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
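
/* A minimal standalone sketch (not used by the pass; all names here are
   hypothetical) of the conservation rule the scheme above relies on: in a
   diamond-shaped CFG A->B, A->C, B->D, C->D with only the non-tree edge
   A->C counted, every other edge and block count is forced by flow
   conservation at each node.  */
static long
sketch_solve_diamond (long entry_count, long count_a_to_c)
{
  long count_a_to_b = entry_count - count_a_to_c;   /* conservation at A */
  long count_b_to_d = count_a_to_b;                 /* B has one successor */
  long count_c_to_d = count_a_to_c;                 /* C has one successor */
  long count_d = count_b_to_d + count_c_to_d;       /* conservation at D */

  return count_d;                                   /* equals entry_count */
}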
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
72 /* Hooks for profiling. */
73 static struct profile_hooks* profile_hooks;
75 /* File for profiling debug output. */
76 static inline FILE*
77 profile_dump_file (void) {
78 return profile_hooks->profile_dump_file ();
81 /* Additional information about the edges we need. */
82 struct edge_info {
83 unsigned int count_valid : 1;
85 /* Is on the spanning tree. */
86 unsigned int on_tree : 1;
88 /* Pretend this edge does not exist (it is abnormal and we've
89 inserted a fake to compensate). */
90 unsigned int ignore : 1;
93 struct bb_info {
94 unsigned int count_valid : 1;
96 /* Number of successor and predecessor edges. */
97 gcov_type succ_count;
98 gcov_type pred_count;
101 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
102 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
104 /* Counter summary from the last set of coverage counts read. */
106 const struct gcov_ctr_summary *profile_info;
108 /* Collect statistics on the performance of this pass for the entire source
109 file. */
111 static int total_num_blocks;
112 static int total_num_edges;
113 static int total_num_edges_ignored;
114 static int total_num_edges_instrumented;
115 static int total_num_blocks_created;
116 static int total_num_passes;
117 static int total_num_times_called;
118 static int total_hist_br_prob[20];
119 static int total_num_never_executed;
120 static int total_num_branches;
122 /* Forward declarations. */
123 static void find_spanning_tree (struct edge_list *);
124 static unsigned instrument_edges (struct edge_list *);
125 static void instrument_values (histogram_values);
126 static void compute_branch_probabilities (void);
127 static void compute_value_histograms (histogram_values);
128 static gcov_type * get_exec_counts (void);
129 static basic_block find_group (basic_block);
130 static void union_groups (basic_block, basic_block);
133 /* Add edge instrumentation code for the current function.
135    EL is its edge list; a counter is emitted for each edge that is neither
136    ignored nor on the spanning tree.  Returns the number of edges instrumented.  */
138 static unsigned
139 instrument_edges (struct edge_list *el)
141 unsigned num_instr_edges = 0;
142 int num_edges = NUM_EDGES (el);
143 basic_block bb;
145 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
147 edge e;
148 edge_iterator ei;
150 FOR_EACH_EDGE (e, ei, bb->succs)
152 struct edge_info *inf = EDGE_INFO (e);
154 if (!inf->ignore && !inf->on_tree)
156 gcc_assert (!(e->flags & EDGE_ABNORMAL));
157 if (dump_file)
158 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
159 e->src->index, e->dest->index,
160 EDGE_CRITICAL_P (e) ? " (and split)" : "");
161 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
166 total_num_blocks_created += num_edges;
167 if (dump_file)
168 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
169 return num_instr_edges;
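
/* Sketch of what the code emitted above by gen_edge_profiler amounts to at
   run time: each instrumented edge increments one slot of a counter array
   that later ends up in the .gcda file.  The array and function names here
   are hypothetical; the real counters live in the per-function gcov
   records.  */
static long long sketch_arc_counters[16];

static void
sketch_count_edge (unsigned counter_index)
{
  sketch_arc_counters[counter_index]++;
}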
172 /* Add code to measure histograms for values in list VALUES. */
173 static void
174 instrument_values (histogram_values values)
176 unsigned i, t;
178 /* Emit code to generate the histograms before the insns. */
180 for (i = 0; i < VEC_length (histogram_value, values); i++)
182 histogram_value hist = VEC_index (histogram_value, values, i);
183 switch (hist->type)
185 case HIST_TYPE_INTERVAL:
186 t = GCOV_COUNTER_V_INTERVAL;
187 break;
189 case HIST_TYPE_POW2:
190 t = GCOV_COUNTER_V_POW2;
191 break;
193 case HIST_TYPE_SINGLE_VALUE:
194 t = GCOV_COUNTER_V_SINGLE;
195 break;
197 case HIST_TYPE_CONST_DELTA:
198 t = GCOV_COUNTER_V_DELTA;
199 break;
201 default:
202 gcc_unreachable ();
204 if (!coverage_counter_alloc (t, hist->n_counters))
205 continue;
207 switch (hist->type)
209 case HIST_TYPE_INTERVAL:
210 (profile_hooks->gen_interval_profiler) (hist, t, 0);
211 break;
213 case HIST_TYPE_POW2:
214 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
215 break;
217 case HIST_TYPE_SINGLE_VALUE:
218 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
219 break;
221 case HIST_TYPE_CONST_DELTA:
222 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
223 break;
225 default:
226 gcc_unreachable ();
232 /* Read the matching arc execution counts for this function from the .gcda file, or return NULL.  */
234 static gcov_type *
235 get_exec_counts (void)
237 unsigned num_edges = 0;
238 basic_block bb;
239 gcov_type *counts;
241 /* Count the edges to be (possibly) instrumented. */
242 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
244 edge e;
245 edge_iterator ei;
247 FOR_EACH_EDGE (e, ei, bb->succs)
248 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
249 num_edges++;
252 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
253 if (!counts)
254 return NULL;
256 if (dump_file && profile_info)
257     fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
258 profile_info->runs, (unsigned) profile_info->sum_max);
260 return counts;
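
/* Sketch of the ordering invariant that makes the flat counter array above
   usable: instrumented edges are always enumerated the same way (blocks in
   layout order, then successor edges, skipping ignored and on-tree edges),
   so counter i belongs to the i-th edge of that walk.  The flat edge
   representation here is hypothetical.  */
struct sketch_edge
{
  int ignore;
  int on_tree;
  long long count;
};

static void
sketch_assign_counters (struct sketch_edge *edges, unsigned n_edges,
                        const long long *counters)
{
  unsigned i, pos = 0;

  for (i = 0; i < n_edges; i++)
    if (!edges[i].ignore && !edges[i].on_tree)
      edges[i].count = counters[pos++];
}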
264 /* Compute the branch probabilities for the various branches.
265 Annotate them accordingly. */
267 static void
268 compute_branch_probabilities (void)
270 basic_block bb;
271 int i;
272 int num_edges = 0;
273 int changes;
274 int passes;
275 int hist_br_prob[20];
276 int num_never_executed;
277 int num_branches;
278 gcov_type *exec_counts = get_exec_counts ();
279 int exec_counts_pos = 0;
281 /* Very simple sanity checks so we catch bugs in our profiling code. */
282 if (profile_info)
284 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
286 error ("corrupted profile info: run_max * runs < sum_max");
287 exec_counts = NULL;
290 if (profile_info->sum_all < profile_info->sum_max)
292 error ("corrupted profile info: sum_all is smaller than sum_max");
293 exec_counts = NULL;
297 /* Attach extra info block to each bb. */
299 alloc_aux_for_blocks (sizeof (struct bb_info));
300 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
302 edge e;
303 edge_iterator ei;
305 FOR_EACH_EDGE (e, ei, bb->succs)
306 if (!EDGE_INFO (e)->ignore)
307 BB_INFO (bb)->succ_count++;
308 FOR_EACH_EDGE (e, ei, bb->preds)
309 if (!EDGE_INFO (e)->ignore)
310 BB_INFO (bb)->pred_count++;
313   /* Keep the solver below from deriving entry/exit block counts from their empty real edge sets.  */
314 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
315 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
317 /* For each edge not on the spanning tree, set its execution count from
318      the .gcda file.  */
320   /* The first count in the .gcda file is the number of times that the function
321 was entered. This is the exec_count for block zero. */
323 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
325 edge e;
326 edge_iterator ei;
328 FOR_EACH_EDGE (e, ei, bb->succs)
329 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
331 num_edges++;
332 if (exec_counts)
334 e->count = exec_counts[exec_counts_pos++];
335 if (e->count > profile_info->sum_max)
337 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
338 bb->index, e->dest->index);
341 else
342 e->count = 0;
344 EDGE_INFO (e)->count_valid = 1;
345 BB_INFO (bb)->succ_count--;
346 BB_INFO (e->dest)->pred_count--;
347 if (dump_file)
349 fprintf (dump_file, "\nRead edge from %i to %i, count:",
350 bb->index, e->dest->index);
351 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
352 (HOST_WIDEST_INT) e->count);
357 if (dump_file)
358 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
360 /* For every block in the file,
361 - if every exit/entrance edge has a known count, then set the block count
362 - if the block count is known, and every exit/entrance edge but one has
363 a known execution count, then set the count of the remaining edge
365 As edge counts are set, decrement the succ/pred count, but don't delete
366 the edge, that way we can easily tell when all edges are known, or only
367 one edge is unknown. */
369 /* The order that the basic blocks are iterated through is important.
370 Since the code that finds spanning trees starts with block 0, low numbered
371 edges are put on the spanning tree in preference to high numbered edges.
372 Hence, most instrumented edges are at the end. Graph solving works much
373 faster if we propagate numbers from the end to the start.
375 This takes an average of slightly more than 3 passes. */
377 changes = 1;
378 passes = 0;
379 while (changes)
381 passes++;
382 changes = 0;
383 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
385 struct bb_info *bi = BB_INFO (bb);
386 if (! bi->count_valid)
388 if (bi->succ_count == 0)
390 edge e;
391 edge_iterator ei;
392 gcov_type total = 0;
394 FOR_EACH_EDGE (e, ei, bb->succs)
395 total += e->count;
396 bb->count = total;
397 bi->count_valid = 1;
398 changes = 1;
400 else if (bi->pred_count == 0)
402 edge e;
403 edge_iterator ei;
404 gcov_type total = 0;
406 FOR_EACH_EDGE (e, ei, bb->preds)
407 total += e->count;
408 bb->count = total;
409 bi->count_valid = 1;
410 changes = 1;
413 if (bi->count_valid)
415 if (bi->succ_count == 1)
417 edge e;
418 edge_iterator ei;
419 gcov_type total = 0;
421 /* One of the counts will be invalid, but it is zero,
422 so adding it in also doesn't hurt. */
423 FOR_EACH_EDGE (e, ei, bb->succs)
424 total += e->count;
426               /* Search for the invalid edge, and set its count.  */
427 FOR_EACH_EDGE (e, ei, bb->succs)
428 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
429 break;
431 /* Calculate count for remaining edge by conservation. */
432 total = bb->count - total;
434 gcc_assert (e);
435 EDGE_INFO (e)->count_valid = 1;
436 e->count = total;
437 bi->succ_count--;
439 BB_INFO (e->dest)->pred_count--;
440 changes = 1;
442 if (bi->pred_count == 1)
444 edge e;
445 edge_iterator ei;
446 gcov_type total = 0;
448 /* One of the counts will be invalid, but it is zero,
449 so adding it in also doesn't hurt. */
450 FOR_EACH_EDGE (e, ei, bb->preds)
451 total += e->count;
453 /* Search for the invalid edge, and set its count. */
454 FOR_EACH_EDGE (e, ei, bb->preds)
455 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
456 break;
458 /* Calculate count for remaining edge by conservation. */
459 total = bb->count - total + e->count;
461 gcc_assert (e);
462 EDGE_INFO (e)->count_valid = 1;
463 e->count = total;
464 bi->pred_count--;
466 BB_INFO (e->src)->succ_count--;
467 changes = 1;
472 if (dump_file)
473 dump_flow_info (dump_file);
475 total_num_passes += passes;
476 if (dump_file)
477 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
479 /* If the graph has been correctly solved, every block will have a
480 succ and pred count of zero. */
481 FOR_EACH_BB (bb)
483 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
486 /* For every edge, calculate its branch probability and add a reg_note
487 to the branch insn to indicate this. */
489 for (i = 0; i < 20; i++)
490 hist_br_prob[i] = 0;
491 num_never_executed = 0;
492 num_branches = 0;
494 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
496 edge e;
497 edge_iterator ei;
498 rtx note;
500 if (bb->count < 0)
502 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
503 bb->index, (int)bb->count);
504 bb->count = 0;
506 FOR_EACH_EDGE (e, ei, bb->succs)
508       /* Function may return twice in the case the called function is
509 setjmp or calls fork, but we can't represent this by extra
510 edge from the entry, since extra edge from the exit is
511 already present. We get negative frequency from the entry
512 point. */
513 if ((e->count < 0
514 && e->dest == EXIT_BLOCK_PTR)
515 || (e->count > bb->count
516 && e->dest != EXIT_BLOCK_PTR))
518 if (block_ends_with_call_p (bb))
519 e->count = e->count < 0 ? 0 : bb->count;
521 if (e->count < 0 || e->count > bb->count)
523 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
524 e->src->index, e->dest->index,
525 (int)e->count);
526 e->count = bb->count / 2;
529 if (bb->count)
531 FOR_EACH_EDGE (e, ei, bb->succs)
532 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
533 if (bb->index >= 0
534 && block_ends_with_condjump_p (bb)
535 && EDGE_COUNT (bb->succs) >= 2)
537 int prob;
538 edge e;
539 int index;
541 /* Find the branch edge. It is possible that we do have fake
542 edges here. */
543 FOR_EACH_EDGE (e, ei, bb->succs)
544 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
545 break;
547 prob = e->probability;
548 index = prob * 20 / REG_BR_PROB_BASE;
550 if (index == 20)
551 index = 19;
552 hist_br_prob[index]++;
554 /* Do this for RTL only. */
555 if (!ir_type ())
557 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
558               /* There may already be a note put there by some other pass,
559                  such as the builtin_expect expander.  */
560 if (note)
561 XEXP (note, 0) = GEN_INT (prob);
562 else
563 REG_NOTES (BB_END (bb))
564 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
565 REG_NOTES (BB_END (bb)));
567 num_branches++;
570       /* Otherwise try to preserve the existing REG_BR_PROB probabilities that
571          tree-based profile guessing put into the code.  BB can be the
572 ENTRY_BLOCK, and it can have multiple (fake) successors in
573 EH cases, but it still has no code; don't crash in this case. */
574 else if (profile_status == PROFILE_ABSENT
575 && !ir_type ()
576 && EDGE_COUNT (bb->succs) > 1
577 && BB_END (bb)
578 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
580 int prob = INTVAL (XEXP (note, 0));
582 BRANCH_EDGE (bb)->probability = prob;
583 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
585 /* As a last resort, distribute the probabilities evenly.
586          Use the simple heuristic that if there are any normal edges, give
587          all abnormal edges a probability of 0; otherwise distribute the
588          probability over the abnormal edges (this is the case of noreturn
589          calls).  */
590 else if (profile_status == PROFILE_ABSENT)
592 int total = 0;
594 FOR_EACH_EDGE (e, ei, bb->succs)
595 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
596 total ++;
597 if (total)
599 FOR_EACH_EDGE (e, ei, bb->succs)
600 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
601 e->probability = REG_BR_PROB_BASE / total;
602 else
603 e->probability = 0;
605 else
607 total += EDGE_COUNT (bb->succs);
608 FOR_EACH_EDGE (e, ei, bb->succs)
609 e->probability = REG_BR_PROB_BASE / total;
611 if (bb->index >= 0
612 && block_ends_with_condjump_p (bb)
613 && EDGE_COUNT (bb->succs) >= 2)
614         num_branches++, num_never_executed++;
617 counts_to_freqs ();
619 if (dump_file)
621 fprintf (dump_file, "%d branches\n", num_branches);
622 fprintf (dump_file, "%d branches never executed\n",
623 num_never_executed);
624 if (num_branches)
625 for (i = 0; i < 10; i++)
626 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
627 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
628 5 * i, 5 * i + 5);
630 total_num_branches += num_branches;
631 total_num_never_executed += num_never_executed;
632 for (i = 0; i < 20; i++)
633 total_hist_br_prob[i] += hist_br_prob[i];
635 fputc ('\n', dump_file);
636 fputc ('\n', dump_file);
639 free_aux_for_blocks ();
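
/* Sketch of the probability scaling used above: an edge count becomes a
   fixed-point probability out of REG_BR_PROB_BASE, rounded to nearest.
   The base value is spelled out (10000) only to keep the example
   self-contained, and block_count is assumed nonzero, as in the code
   above.  */
static int
sketch_scale_probability (long long edge_count, long long block_count)
{
  const long long base = 10000;   /* REG_BR_PROB_BASE */

  return (int) ((edge_count * base + block_count / 2) / block_count);
}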
642 /* Load the value histograms described in the VALUES array
643    from the .gcda file.  */
645 static void
646 compute_value_histograms (histogram_values values)
648 unsigned i, j, t, any;
649 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
650 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
651 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
652 gcov_type *aact_count;
654 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
655 n_histogram_counters[t] = 0;
657 for (i = 0; i < VEC_length (histogram_value, values); i++)
659 histogram_value hist = VEC_index (histogram_value, values, i);
660 n_histogram_counters[(int) hist->type] += hist->n_counters;
663 any = 0;
664 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
666 if (!n_histogram_counters[t])
668 histogram_counts[t] = NULL;
669 continue;
672 histogram_counts[t] =
673 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
674 n_histogram_counters[t], NULL);
675 if (histogram_counts[t])
676 any = 1;
677 act_count[t] = histogram_counts[t];
679 if (!any)
680 return;
682 for (i = 0; i < VEC_length (histogram_value, values); i++)
684 histogram_value hist = VEC_index (histogram_value, values, i);
685 tree stmt = hist->hvalue.stmt;
686 stmt_ann_t ann = get_stmt_ann (stmt);
688 t = (int) hist->type;
690 aact_count = act_count[t];
691 act_count[t] += hist->n_counters;
693 hist->hvalue.next = ann->histograms;
694 ann->histograms = hist;
695 hist->hvalue.counters =
696 xmalloc (sizeof (gcov_type) * hist->n_counters);
697 for (j = 0; j < hist->n_counters; j++)
698 hist->hvalue.counters[j] = aact_count[j];
701 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
702 if (histogram_counts[t])
703 free (histogram_counts[t]);
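
/* Sketch of the counter slicing done above: all counters of one histogram
   kind are read back as a single flat array, and each histogram value takes
   the next hist->n_counters entries of it.  Simplified, hypothetical
   types.  */
static void
sketch_slice_counters (const long long *flat, const unsigned *n_counters,
                       unsigned n_histograms, const long long **slices)
{
  unsigned i, pos = 0;

  for (i = 0; i < n_histograms; i++)
    {
      slices[i] = flat + pos;
      pos += n_counters[i];
    }
}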
706 #define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
707 /* When passed NULL as file_name, initialize.
708    When passed something else, output the commands needed to switch the
709    current location to FILE_NAME and LINE, updating *OFFSET as needed.  */
710 static void
711 output_location (char const *file_name, int line,
712 gcov_position_t *offset, basic_block bb)
714 static char const *prev_file_name;
715 static int prev_line;
716 bool name_differs, line_differs;
718 if (!file_name)
720 prev_file_name = NULL;
721 prev_line = -1;
722 return;
725 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
726 line_differs = prev_line != line;
728 if (name_differs || line_differs)
730 if (!*offset)
732 *offset = gcov_write_tag (GCOV_TAG_LINES);
733 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
734           name_differs = line_differs = true;
737 /* If this is a new source file, then output the
738          file's name to the .gcno file.  */
739 if (name_differs)
741 prev_file_name = file_name;
742 gcov_write_unsigned (0);
743 gcov_write_string (prev_file_name);
745 if (line_differs)
747 gcov_write_unsigned (line);
748 prev_line = line;
753 /* Instrument and/or analyze program behavior based on program flow graph.
754 In either case, this function builds a flow graph for the function being
755 compiled. The flow graph is stored in BB_GRAPH.
757 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
758 the flow graph that are needed to reconstruct the dynamic behavior of the
759 flow graph.
761 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
762 information from a data file containing edge count information from previous
763 executions of the function being compiled. In this case, the flow graph is
764 annotated with actual execution counts, which are later propagated into the
765 rtl for optimization purposes.
767 Main entry point of this file. */
769 void
770 branch_prob (void)
772 basic_block bb;
773 unsigned i;
774 unsigned num_edges, ignored_edges;
775 unsigned num_instrumented;
776 struct edge_list *el;
777 histogram_values values = NULL;
779 total_num_times_called++;
781 flow_call_edges_add (NULL);
782 add_noreturn_fake_exit_edges ();
784 /* We can't handle cyclic regions constructed using abnormal edges.
785      To avoid these we replace every source of an abnormal edge by a fake
786      edge from the entry node, and every destination by a fake edge to the
787      exit.  This keeps the graph acyclic and our calculation exact for all
788      normal edges except the exit and entrance ones.
790 We also add fake exit edges for each call and asm statement in the
791      basic block, since it may not return.  */
793 FOR_EACH_BB (bb)
795 int need_exit_edge = 0, need_entry_edge = 0;
796 int have_exit_edge = 0, have_entry_edge = 0;
797 edge e;
798 edge_iterator ei;
800 /* Functions returning multiple times are not handled by extra edges.
801          Instead we simply allow negative counts on the edges from the exit to
802          the block past the call, and the corresponding probabilities.  We can't
803          go with the extra edges because that would result in a flowgraph that
804          needs to have fake edges outside the spanning tree.  */
806 FOR_EACH_EDGE (e, ei, bb->succs)
808 block_stmt_iterator bsi;
809 tree last = NULL;
811 /* It may happen that there are compiler generated statements
812 without a locus at all. Go through the basic block from the
813 last to the first statement looking for a locus. */
814 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
816 last = bsi_stmt (bsi);
817 if (EXPR_LOCUS (last))
818 break;
821           /* An edge with a goto locus might get wrong coverage info unless
822              it is the only edge out of BB, so such edges are split off below.
823              Don't split when the locations match, so that
824                if (blah) goto something;
825              is not counted twice.  */
826 if (last && EXPR_LOCUS (last)
827 && e->goto_locus
828 && !single_succ_p (bb)
829 #ifdef USE_MAPPED_LOCATION
830 && (LOCATION_FILE (e->goto_locus)
831 != LOCATION_FILE (EXPR_LOCATION (last))
832 || (LOCATION_LINE (e->goto_locus)
833 != LOCATION_LINE (EXPR_LOCATION (last)))))
834 #else
835 && (e->goto_locus->file != EXPR_LOCUS (last)->file
836 || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
837 #endif
839 basic_block new = split_edge (e);
840 single_succ_edge (new)->goto_locus = e->goto_locus;
842 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
843 && e->dest != EXIT_BLOCK_PTR)
844 need_exit_edge = 1;
845 if (e->dest == EXIT_BLOCK_PTR)
846 have_exit_edge = 1;
848 FOR_EACH_EDGE (e, ei, bb->preds)
850 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
851 && e->src != ENTRY_BLOCK_PTR)
852 need_entry_edge = 1;
853 if (e->src == ENTRY_BLOCK_PTR)
854 have_entry_edge = 1;
857 if (need_exit_edge && !have_exit_edge)
859 if (dump_file)
860 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
861 bb->index);
862 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
864 if (need_entry_edge && !have_entry_edge)
866 if (dump_file)
867 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
868 bb->index);
869 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
873 el = create_edge_list ();
874 num_edges = NUM_EDGES (el);
875 alloc_aux_for_edges (sizeof (struct edge_info));
877 /* The basic blocks are expected to be numbered sequentially. */
878 compact_blocks ();
880 ignored_edges = 0;
881 for (i = 0 ; i < num_edges ; i++)
883 edge e = INDEX_EDGE (el, i);
884 e->count = 0;
886 /* Mark edges we've replaced by fake edges above as ignored. */
887 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
888 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
890 EDGE_INFO (e)->ignore = 1;
891 ignored_edges++;
895 /* Create spanning tree from basic block graph, mark each edge that is
896 on the spanning tree. We insert as many abnormal and critical edges
897 as possible to minimize number of edge splits necessary. */
899 find_spanning_tree (el);
901 /* Fake edges that are not on the tree will not be instrumented, so
902 mark them ignored. */
903 for (num_instrumented = i = 0; i < num_edges; i++)
905 edge e = INDEX_EDGE (el, i);
906 struct edge_info *inf = EDGE_INFO (e);
908 if (inf->ignore || inf->on_tree)
909 /*NOP*/;
910 else if (e->flags & EDGE_FAKE)
912 inf->ignore = 1;
913 ignored_edges++;
915 else
916 num_instrumented++;
919 total_num_blocks += n_basic_blocks + 2;
920 if (dump_file)
921 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
923 total_num_edges += num_edges;
924 if (dump_file)
925 fprintf (dump_file, "%d edges\n", num_edges);
927 total_num_edges_ignored += ignored_edges;
928 if (dump_file)
929 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
931 /* Write the data from which gcov can reconstruct the basic block
932 graph. */
934 /* Basic block flags */
935 if (coverage_begin_output ())
937 gcov_position_t offset;
939 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
940 for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
941 gcov_write_unsigned (0);
942 gcov_write_length (offset);
945 /* Keep all basic block indexes nonnegative in the gcov output.
946      Index 0 is used for the entry block, the last index for the exit block.  */
948 ENTRY_BLOCK_PTR->index = -1;
949 EXIT_BLOCK_PTR->index = last_basic_block;
951 /* Arcs */
952 if (coverage_begin_output ())
954 gcov_position_t offset;
956 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
958 edge e;
959 edge_iterator ei;
961 offset = gcov_write_tag (GCOV_TAG_ARCS);
962 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
964 FOR_EACH_EDGE (e, ei, bb->succs)
966 struct edge_info *i = EDGE_INFO (e);
967 if (!i->ignore)
969 unsigned flag_bits = 0;
971 if (i->on_tree)
972 flag_bits |= GCOV_ARC_ON_TREE;
973 if (e->flags & EDGE_FAKE)
974 flag_bits |= GCOV_ARC_FAKE;
975 if (e->flags & EDGE_FALLTHRU)
976 flag_bits |= GCOV_ARC_FALLTHROUGH;
977 /* On trees we don't have fallthru flags, but we can
978 recompute them from CFG shape. */
979 if (ir_type ()
980 && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
981 && e->src->next_bb == e->dest)
982 flag_bits |= GCOV_ARC_FALLTHROUGH;
984 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
985 gcov_write_unsigned (flag_bits);
989 gcov_write_length (offset);
993 /* Line numbers. */
994 if (coverage_begin_output ())
996 /* Initialize the output. */
997 output_location (NULL, 0, NULL, NULL);
999 if (!ir_type ())
1001 gcov_position_t offset;
1003 FOR_EACH_BB (bb)
1005 rtx insn = BB_HEAD (bb);
1006 int ignore_next_note = 0;
1008 offset = 0;
1010 /* We are looking for line number notes. Search backward
1011              from the start of the basic block to find the correct ones.  */
1012 insn = prev_nonnote_insn (insn);
1013 if (!insn)
1014 insn = get_insns ();
1015 else
1016 insn = NEXT_INSN (insn);
1018 while (insn != BB_END (bb))
1020 if (NOTE_P (insn))
1022 /* Must ignore the line number notes that
1023 immediately follow the end of an inline function
1024 to avoid counting it twice. There is a note
1025 before the call, and one after the call. */
1026 if (NOTE_LINE_NUMBER (insn)
1027 == NOTE_INSN_REPEATED_LINE_NUMBER)
1028 ignore_next_note = 1;
1029 else if (NOTE_LINE_NUMBER (insn) <= 0)
1030 /*NOP*/;
1031 else if (ignore_next_note)
1032 ignore_next_note = 0;
1033 else
1035 expanded_location s;
1036 NOTE_EXPANDED_LOCATION (s, insn);
1037 output_location (s.file, s.line, &offset, bb);
1040 insn = NEXT_INSN (insn);
1043 if (offset)
1045 /* A file of NULL indicates the end of run. */
1046 gcov_write_unsigned (0);
1047 gcov_write_string (NULL);
1048 gcov_write_length (offset);
1052 else
1054 gcov_position_t offset;
1056 FOR_EACH_BB (bb)
1058 block_stmt_iterator bsi;
1060 offset = 0;
1062 if (bb == ENTRY_BLOCK_PTR->next_bb)
1064 expanded_location curr_location =
1065 expand_location (DECL_SOURCE_LOCATION
1066 (current_function_decl));
1067 output_location (curr_location.file, curr_location.line,
1068 &offset, bb);
1071 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1073 tree stmt = bsi_stmt (bsi);
1074 if (EXPR_HAS_LOCATION (stmt))
1075 output_location (EXPR_FILENAME (stmt),
1076 EXPR_LINENO (stmt),
1077 &offset, bb);
1080 /* Notice GOTO expressions we eliminated while constructing the
1081 CFG. */
1082 if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
1084 /* ??? source_locus type is marked deprecated in input.h. */
1085 source_locus curr_location = single_succ_edge (bb)->goto_locus;
1086 /* ??? The FILE/LINE API is inconsistent for these cases. */
1087 #ifdef USE_MAPPED_LOCATION
1088 output_location (LOCATION_FILE (curr_location),
1089 LOCATION_LINE (curr_location),
1090 &offset, bb);
1091 #else
1092 output_location (curr_location->file, curr_location->line,
1093 &offset, bb);
1094 #endif
1097 if (offset)
1099 /* A file of NULL indicates the end of run. */
1100 gcov_write_unsigned (0);
1101 gcov_write_string (NULL);
1102 gcov_write_length (offset);
1108 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
1109 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
1110 #undef BB_TO_GCOV_INDEX
1112 if (flag_profile_values)
1113 find_values_to_profile (&values);
1115 if (flag_branch_probabilities)
1117 compute_branch_probabilities ();
1118 if (flag_profile_values)
1119 compute_value_histograms (values);
1122 remove_fake_edges ();
1124 /* For each edge not on the spanning tree, add counting code. */
1125 if (profile_arc_flag
1126 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1128 unsigned n_instrumented;
1130 profile_hooks->init_edge_profiler ();
1132 n_instrumented = instrument_edges (el);
1134 gcc_assert (n_instrumented == num_instrumented);
1136 if (flag_profile_values)
1137 instrument_values (values);
1139 /* Commit changes done by instrumentation. */
1140 if (ir_type ())
1141 bsi_commit_edge_inserts ();
1142 else
1144 commit_edge_insertions_watch_calls ();
1145 allocate_reg_info (max_reg_num (), FALSE, FALSE);
1149 free_aux_for_edges ();
1151 if (!ir_type ())
1153 /* Re-merge split basic blocks and the mess introduced by
1154 insert_insn_on_edge. */
1155 cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
1156 if (profile_dump_file())
1157 dump_flow_info (profile_dump_file());
1160 VEC_free (histogram_value, heap, values);
1161 free_edge_list (el);
1162 if (flag_branch_probabilities)
1163 profile_status = PROFILE_READ;
1164 coverage_end_function ();
1167 /* Union find algorithm implementation for the basic blocks using
1168 aux fields. */
1170 static basic_block
1171 find_group (basic_block bb)
1173 basic_block group = bb, bb1;
1175 while ((basic_block) group->aux != group)
1176 group = (basic_block) group->aux;
1178 /* Compress path. */
1179 while ((basic_block) bb->aux != group)
1181 bb1 = (basic_block) bb->aux;
1182 bb->aux = (void *) group;
1183 bb = bb1;
1185 return group;
1188 static void
1189 union_groups (basic_block bb1, basic_block bb2)
1191 basic_block bb1g = find_group (bb1);
1192 basic_block bb2g = find_group (bb2);
1194   /* ??? There is no place for a rank field, so go without it;
1195      this code is unlikely to be a performance problem anyway.  */
1196 gcc_assert (bb1g != bb2g);
1198 bb1g->aux = bb2g;
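
/* Standalone sketch of the same union-find scheme on plain integers, where
   parent[] plays the role of the aux field: find with path compression, and
   union by simply linking the two roots, as in find_group/union_groups
   above.  The names are hypothetical.  */
static int
sketch_find_group (int *parent, int x)
{
  int root = x, next;

  while (parent[root] != root)
    root = parent[root];
  while (parent[x] != root)     /* compress the path */
    {
      next = parent[x];
      parent[x] = root;
      x = next;
    }
  return root;
}

static void
sketch_union_groups (int *parent, int a, int b)
{
  parent[sketch_find_group (parent, a)] = sketch_find_group (parent, b);
}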
1201 /* This function searches all of the edges in the program flow graph, and puts
1202 as many bad edges as possible onto the spanning tree. Bad edges include
1203    abnormal edges, which can't be instrumented at the moment.  Since it is
1204    possible for fake edges to form a cycle, we will have to develop some
1205    better way in the future.  Also put critical edges on the tree, since they
1206 are more expensive to instrument. */
1208 static void
1209 find_spanning_tree (struct edge_list *el)
1211 int i;
1212 int num_edges = NUM_EDGES (el);
1213 basic_block bb;
1215 /* We use aux field for standard union-find algorithm. */
1216 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1217 bb->aux = bb;
1219   /* Put the fake exit-to-entry edge, which we can't instrument, on the tree.  */
1220 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1222 /* First add all abnormal edges to the tree unless they form a cycle. Also
1223      add all edges to EXIT_BLOCK_PTR, to avoid inserting profiling code after
1224      the code that sets the function's return value.  */
1225 for (i = 0; i < num_edges; i++)
1227 edge e = INDEX_EDGE (el, i);
1228 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1229 || e->dest == EXIT_BLOCK_PTR)
1230 && !EDGE_INFO (e)->ignore
1231 && (find_group (e->src) != find_group (e->dest)))
1233 if (dump_file)
1234 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1235 e->src->index, e->dest->index);
1236 EDGE_INFO (e)->on_tree = 1;
1237 union_groups (e->src, e->dest);
1241 /* Now insert all critical edges to the tree unless they form a cycle. */
1242 for (i = 0; i < num_edges; i++)
1244 edge e = INDEX_EDGE (el, i);
1245 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1246 && find_group (e->src) != find_group (e->dest))
1248 if (dump_file)
1249 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1250 e->src->index, e->dest->index);
1251 EDGE_INFO (e)->on_tree = 1;
1252 union_groups (e->src, e->dest);
1256 /* And now the rest. */
1257 for (i = 0; i < num_edges; i++)
1259 edge e = INDEX_EDGE (el, i);
1260 if (!EDGE_INFO (e)->ignore
1261 && find_group (e->src) != find_group (e->dest))
1263 if (dump_file)
1264 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1265 e->src->index, e->dest->index);
1266 EDGE_INFO (e)->on_tree = 1;
1267 union_groups (e->src, e->dest);
1271 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1272 bb->aux = NULL;
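
/* Sketch of the arithmetic behind the instrumentation statistics: for a
   connected graph with V blocks (including the fake entry and exit) and E
   non-ignored edges, the spanning tree built above covers V - 1 of them,
   so at most E - (V - 1) edges ever need run-time counters.  */
static unsigned
sketch_max_instrumented_edges (unsigned n_blocks, unsigned n_edges)
{
  return n_edges - (n_blocks - 1);
}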
1275 /* Perform file-level initialization for branch-prob processing. */
1277 void
1278 init_branch_prob (void)
1280 int i;
1282 total_num_blocks = 0;
1283 total_num_edges = 0;
1284 total_num_edges_ignored = 0;
1285 total_num_edges_instrumented = 0;
1286 total_num_blocks_created = 0;
1287 total_num_passes = 0;
1288 total_num_times_called = 0;
1289 total_num_branches = 0;
1290 total_num_never_executed = 0;
1291 for (i = 0; i < 20; i++)
1292 total_hist_br_prob[i] = 0;
1295 /* Performs file-level cleanup after branch-prob processing
1296 is completed. */
1298 void
1299 end_branch_prob (void)
1301 if (dump_file)
1303 fprintf (dump_file, "\n");
1304 fprintf (dump_file, "Total number of blocks: %d\n",
1305 total_num_blocks);
1306 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1307 fprintf (dump_file, "Total number of ignored edges: %d\n",
1308 total_num_edges_ignored);
1309 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1310 total_num_edges_instrumented);
1311 fprintf (dump_file, "Total number of blocks created: %d\n",
1312 total_num_blocks_created);
1313 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1314 total_num_passes);
1315 if (total_num_times_called != 0)
1316 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1317 (total_num_passes + (total_num_times_called >> 1))
1318 / total_num_times_called);
1319 fprintf (dump_file, "Total number of branches: %d\n",
1320 total_num_branches);
1321 fprintf (dump_file, "Total number of branches never executed: %d\n",
1322 total_num_never_executed);
1323 if (total_num_branches)
1325 int i;
1327 for (i = 0; i < 10; i++)
1328 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1329 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1330 / total_num_branches, 5*i, 5*i+5);
1335 /* Set up hooks to enable tree-based profiling. */
1337 void
1338 tree_register_profile_hooks (void)
1340 gcc_assert (ir_type ());
1341 profile_hooks = &tree_profile_hooks;
1345 /* Do branch profiling and static profile estimation passes. */
1346 static void
1347 rest_of_handle_branch_prob (void)
1349 struct loops loops;
1351 /* Discover and record the loop depth at the head of each basic
1352 block. The loop infrastructure does the real job for us. */
1353 flow_loops_find (&loops);
1355 if (dump_file)
1356 flow_loops_dump (&loops, dump_file, NULL, 0);
1358 /* Estimate using heuristics if no profiling info is available. */
1359 if (flag_guess_branch_prob
1360 && profile_status == PROFILE_ABSENT)
1361 estimate_probability (&loops);
1363 flow_loops_free (&loops);
1364 free_dominance_info (CDI_DOMINATORS);
1367 struct tree_opt_pass pass_branch_prob =
1369 "bp", /* name */
1370 NULL, /* gate */
1371 rest_of_handle_branch_prob, /* execute */
1372 NULL, /* sub */
1373 NULL, /* next */
1374 0, /* static_pass_number */
1375 TV_BRANCH_PROB, /* tv_id */
1376 0, /* properties_required */
1377 0, /* properties_provided */
1378 0, /* properties_destroyed */
1379 0, /* todo_flags_start */
1380 TODO_dump_func, /* todo_flags_finish */
1381 'b' /* letter */