gcc/profile.c

/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   BB minimal spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is described in full
   in gcov-io.h.  */

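/* A minimal, self-contained sketch of the idea described above: counts
   for edges left off the spanning tree are recovered by flow
   conservation from the few edges that do carry counters.  The diamond
   CFG, the edge names and the concrete numbers below are invented for
   illustration, and the block is guarded with "#if 0" so it has no
   effect on the build.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* CFG:  ENTRY -> A -> {B, C};  B -> D;  C -> D;  D -> EXIT.
     Assume the spanning tree holds every edge except A -> C, so besides
     the entry count only A -> C needs a runtime counter.  */
  long long entry_count = 10;   /* times the function was entered  */
  long long a_to_c = 3;         /* the single instrumented edge    */

  /* Conservation at A:  A->B + A->C == executions of A.  */
  long long a_to_b = entry_count - a_to_c;

  /* B and C each have one successor, so their counts flow through.  */
  long long b_to_d = a_to_b;
  long long c_to_d = a_to_c;

  /* Conservation at D:  D->EXIT == B->D + C->D.  */
  long long d_to_exit = b_to_d + c_to_d;

  printf ("A->B=%lld B->D=%lld C->D=%lld D->EXIT=%lld\n",
          a_to_b, b_to_d, c_to_d, d_to_exit);
  return 0;
}
#endif
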
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-flow.h"

/* Hooks for profiling.  */
static struct profile_hooks* profile_hooks;

/* File for profiling debug output.  */
static inline FILE*
profile_dump_file (void) {
  return profile_hooks->profile_dump_file ();
}

/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (unsigned, struct histogram_value *);
static void compute_branch_probabilities (void);
static void compute_value_histograms (unsigned, struct histogram_value *);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);

/* Add edge instrumentation code to the entire insn chain.  EL is the
   edge list of the current function; return the number of edges
   actually instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              if (e->flags & EDGE_ABNORMAL)
                abort ();
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}

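/* A minimal sketch of the effect gen_edge_profiler aims for: every
   instrumented edge gets its own slot in a counter array, bumped each
   time control flows along that edge.  The array, the function and the
   numbers below are invented stand-ins, not the real libgcov
   interface, and the block is guarded with "#if 0" so it has no effect
   on the build.  */
#if 0
#include <stdio.h>

static long long edge_counters[2];      /* one slot per instrumented edge  */

static int
branchy (int x)
{
  if (x > 0)
    {
      edge_counters[0]++;               /* edge taken when x > 0   */
      return x;
    }
  edge_counters[1]++;                   /* edge taken when x <= 0  */
  return -x;
}

int
main (void)
{
  int i;

  for (i = -2; i <= 3; i++)
    branchy (i);
  printf ("true edge %lld, false edge %lld\n",
          edge_counters[0], edge_counters[1]);  /* prints 3 and 3  */
  return 0;
}
#endif
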
/* Add code to measure the histograms listed in VALUES, of length
   N_VALUES.  */
static void
instrument_values (unsigned n_values, struct histogram_value *values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < n_values; i++)
    {
      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          abort ();
        }
      if (!coverage_counter_alloc (t, values[i].n_counters))
        continue;

      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (values + i, t, 0);
          break;

        default:
          abort ();
        }
    }
}

/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}

/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        {
          error ("corrupted profile info: run_max * runs < sum_max");
          exec_counts = NULL;
        }

      if (profile_info->sum_all < profile_info->sum_max)
        {
          error ("corrupted profile info: sum_all is smaller than sum_max");
          exec_counts = NULL;
        }
    }

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      for (e = bb->pred; e; e = e->pred_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                           bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */

  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  if (! e)
                    abort ();
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  if (! e)
                    abort ();
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
        abort ();
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      for (e = bb->succ; e; e = e->succ_next)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int)e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          for (e = bb->succ; e; e = e->succ_next)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && bb->succ->succ_next)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
                   e = e->succ_next)
                continue; /* Loop body has been intentionally left blank.  */

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              /* Do this for RTL only.  */
              if (!ir_type ())
                {
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
                  /* There may be already note put by some other pass, such
                     as builtin_expect expander.  */
                  if (note)
                    XEXP (note, 0) = GEN_INT (prob);
                  else
                    REG_NOTES (BB_END (bb))
                      = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                           REG_NOTES (BB_END (bb)));
                }
              num_branches++;
            }
        }
      /* Otherwise distribute the probabilities evenly so we get sane
         sum.  Use simple heuristics that if there are normal edges,
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
      else
        {
          int total = 0;

          for (e = bb->succ; e; e = e->succ_next)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total ++;
          if (total)
            {
              for (e = bb->succ; e; e = e->succ_next)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              for (e = bb->succ; e; e = e->succ_next)
                total ++;
              for (e = bb->succ; e; e = e->succ_next)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && bb->succ->succ_next)
            num_branches++, num_never_executed++;
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}

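/* A small worked example of the probability formula used above,
   (count * REG_BR_PROB_BASE + bb_count / 2) / bb_count, i.e. scaling an
   edge count to a REG_BR_PROB_BASE-relative probability with rounding
   to nearest.  The counts are invented and the base is assumed to be
   10000 as in GCC of this era; the block is guarded with "#if 0" so it
   has no effect on the build.  */
#if 0
#include <stdio.h>

#define EXAMPLE_BR_PROB_BASE 10000

int
main (void)
{
  long long bb_count = 7;                /* executions of the block       */
  long long edge_count[2] = { 5, 2 };    /* executions of each successor  */
  int i;

  for (i = 0; i < 2; i++)
    {
      long long prob = (edge_count[i] * EXAMPLE_BR_PROB_BASE + bb_count / 2)
                       / bb_count;
      printf ("edge %d: %lld out of %d\n", i, prob, EXAMPLE_BR_PROB_BASE);
    }
  /* Prints 7143 and 2857; rounding keeps the sum within a count or two
     of the full base.  */
  return 0;
}
#endif
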
/* Load value histograms from the .da file for the N_VALUES values whose
   descriptions are stored in the VALUES array.  */
static void
compute_value_histograms (unsigned n_values, struct histogram_value *values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < n_values; i++)
    n_histogram_counters[(int) (values[i].type)] += values[i].n_counters;

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < n_values; i++)
    {
      rtx hist_list = NULL_RTX;
      t = (int) (values[i].type);

      /* FIXME: make this work for trees.  */
      if (!ir_type ())
        {
          aact_count = act_count[t];
          act_count[t] += values[i].n_counters;
          for (j = values[i].n_counters; j > 0; j--)
            hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
                                         hist_list);
          hist_list = alloc_EXPR_LIST (0,
                                       copy_rtx ((rtx)values[i].value),
                                       hist_list);
          hist_list = alloc_EXPR_LIST (0, GEN_INT (values[i].type), hist_list);
          REG_NOTES ((rtx)values[i].insn) =
            alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
                             REG_NOTES ((rtx)values[i].insn));
        }
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  unsigned n_values = 0;
  struct histogram_value *values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      for (e = bb->succ; e; e = e->succ_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      for (e = bb->pred; e; e = e->pred_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
#define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          for (e = bb->succ; e; e = e->succ_next)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }

  /* Line numbers.  */
  /* FIXME: make this work for trees.  (Line numbers are in location_t
     objects, but aren't always attached to the obvious tree...)  */
  if (coverage_begin_output () && !ir_type ())
    {
      char const *prev_file_name = NULL;
      gcov_position_t offset;

      FOR_EACH_BB (bb)
        {
          rtx insn = BB_HEAD (bb);
          int ignore_next_note = 0;

          offset = 0;

          /* We are looking for line number notes.  Search backward
             before basic block to find correct ones.  */
          insn = prev_nonnote_insn (insn);
          if (!insn)
            insn = get_insns ();
          else
            insn = NEXT_INSN (insn);

          while (insn != BB_END (bb))
            {
              if (NOTE_P (insn))
                {
                  /* Must ignore the line number notes that
                     immediately follow the end of an inline function
                     to avoid counting it twice.  There is a note
                     before the call, and one after the call.  */
                  if (NOTE_LINE_NUMBER (insn)
                      == NOTE_INSN_REPEATED_LINE_NUMBER)
                    ignore_next_note = 1;
                  else if (NOTE_LINE_NUMBER (insn) <= 0)
                    /*NOP*/;
                  else if (ignore_next_note)
                    ignore_next_note = 0;
                  else
                    {
                      expanded_location s;

                      if (!offset)
                        {
                          offset = gcov_write_tag (GCOV_TAG_LINES);
                          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
                        }

                      NOTE_EXPANDED_LOCATION (s, insn);

                      /* If this is a new source file, then output the
                         file's name to the .bb file.  */
                      if (!prev_file_name
                          || strcmp (s.file, prev_file_name))
                        {
                          prev_file_name = s.file;
                          gcov_write_unsigned (0);
                          gcov_write_string (prev_file_name);
                        }
                      gcov_write_unsigned (s.line);
                    }
                }
              insn = NEXT_INSN (insn);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&n_values, &values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (n_values, values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented = instrument_edges (el);

      if (n_instrumented != num_instrumented)
        abort ();

      if (flag_profile_values)
        instrument_values (n_values, values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
        bsi_commit_edge_inserts ((int *)NULL);
      else
        {
          commit_edge_insertions_watch_calls ();
          allocate_reg_info (max_reg_num (), FALSE, FALSE);
        }
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
         insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file ())
        dump_flow_info (profile_dump_file ());
    }

  free_edge_list (el);
  profile_status = PROFILE_READ;
}

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK, let's go without
     it; this code is unlikely to be a performance problem anyway.  */
  if (bb1g == bb2g)
    abort ();

  bb1g->aux = bb2g;
}

/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges on the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}

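/* A minimal sketch mirroring find_group/union_groups above with plain
   array indices instead of basic_block aux pointers: an edge goes on
   the spanning tree only when its endpoints are in different groups,
   so a cycle can never be added.  The graph and names are invented for
   illustration, and the block is guarded with "#if 0" so it has no
   effect on the build.  */
#if 0
#include <stdio.h>

#define N 5
static int parent[N];

static int
find (int x)
{
  int root = x, tmp;

  while (parent[root] != root)
    root = parent[root];
  /* Compress the path, as find_group does.  */
  while (parent[x] != root)
    {
      tmp = parent[x];
      parent[x] = root;
      x = tmp;
    }
  return root;
}

int
main (void)
{
  /* Edges of a small graph; the last one closes a cycle.  */
  static const int edges[5][2]
    = { {0, 1}, {1, 2}, {2, 3}, {3, 4}, {4, 0} };
  int i;

  for (i = 0; i < N; i++)
    parent[i] = i;

  for (i = 0; i < 5; i++)
    {
      int a = find (edges[i][0]), b = find (edges[i][1]);

      if (a != b)
        {
          parent[a] = b;                /* the union_groups step  */
          printf ("edge %d-%d put on tree\n", edges[i][0], edges[i][1]);
        }
      else
        printf ("edge %d-%d would close a cycle, skipped\n",
                edges[i][0], edges[i][1]);
    }
  return 0;
}
#endif
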
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5*i, 5*i+5);
        }
    }
}

/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  profile_hooks = &tree_profile_hooks;
  if (!ir_type ())
    abort ();
}

/* Set up hooks to enable RTL-based profiling.  */

void
rtl_register_profile_hooks (void)
{
  profile_hooks = &rtl_profile_hooks;
  if (ir_type ())
    abort ();
}