/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we build the
   minimal spanning tree of the BB graph; only edges that are not on
   the spanning tree (plus the entry point) need instrumenting.  From
   that information all other edge counts can be deduced.  By
   construction all fake edges must be on the spanning tree.  We also
   attempt to place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
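/* For instance, if a block A has two successors B and C, instrumenting
   the A->B edge alone is enough once A's count is known: by flow
   conservation, count(A->C) = count(A) - count(A->B).  The spanning
   tree marks exactly the edges whose counts can be recovered this
   way.  */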
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-flow.h"
/* Hooks for profiling.  */
static struct profile_hooks* profile_hooks;

/* File for profiling debug output.  */
static inline FILE*
profile_dump_file (void) {
  return profile_hooks->profile_dump_file ();
}
/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
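/* EDGE_INFO and BB_INFO read the per-edge and per-block scratch data out
   of the generic aux pointers; the structures themselves are allocated
   with alloc_aux_for_edges and alloc_aux_for_blocks before use and
   released with the matching free_aux_* calls afterwards.  */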
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
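/* These totals accumulate over every function compiled in this run and
   are reported by end_branch_prob.  total_hist_br_prob holds twenty
   buckets covering 5% ranges of predicted branch probability; the report
   folds bucket i together with bucket 19-i to print ten ranges.  */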
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (unsigned, struct histogram_value *);
static void compute_branch_probabilities (void);
static void compute_value_histograms (unsigned, struct histogram_value *);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Add edge instrumentation code to the entire insn chain.

   EL is the edge list of the current function's flow graph.  A counter
   is generated for every edge that is neither ignored nor on the
   spanning tree; the number of instrumented edges is returned.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  remove_fake_edges ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              if (e->flags & EDGE_ABNORMAL)
                abort ();
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure the histograms for the list VALUES of length
   N_VALUES.  */
static void
instrument_values (unsigned n_values, struct histogram_value *values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < n_values; i++)
    {
      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          abort ();
        }
      if (!coverage_counter_alloc (t, values[i].n_counters))
        continue;

      switch (values[i].type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (values + i, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (values + i, t, 0);
          break;

        default:
          abort ();
        }
    }
}
/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
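/* Note that the counter array returned above is consumed positionally:
   compute_branch_probabilities walks the blocks and their successor
   edges in the same order as the loop above (skipping edges that are
   ignored or on the spanning tree), so the i-th counter read at run time
   must correspond to the i-th instrumented edge.  instrument_edges
   relies on the same ordering when it assigns counter slots.  */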
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        {
          error ("corrupted profile info: run_max * runs < sum_max");
          exec_counts = NULL;
        }

      if (profile_info->sum_all < profile_info->sum_max)
        {
          error ("corrupted profile info: sum_all is smaller than sum_max");
          exec_counts = NULL;
        }
    }

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      for (e = bb->pred; e; e = e->pred_next)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                           bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
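  /* Example of one propagation step: suppose block B has a known count of
     100, three incoming edges, and two of them already have counts 60 and
     30.  Then pred_count has dropped to 1, and the remaining edge must
     have count 100 - (60 + 30) = 10 by conservation; setting it may in
     turn make another block or edge solvable on the next pass.  */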
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  gcov_type total = 0;

                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->succ; e; e = e->succ_next)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  if (! e)
                    abort ();
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  for (e = bb->pred; e; e = e->pred_next)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  if (! e)
                    abort ();
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
        abort ();
    }
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      for (e = bb->succ; e; e = e->succ_next)
        {
          /* The function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by an extra
             edge from the entry, since an extra edge from the exit is
             already present.  We get a negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int)e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
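          /* Scale each successor count to a fixed-point probability out of
             REG_BR_PROB_BASE; the "+ bb->count / 2" term rounds to the
             nearest value instead of truncating.  For example, with
             bb->count == 3, e->count == 2 and REG_BR_PROB_BASE == 10000
             this gives 6667 rather than the truncated 6666.  */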
          for (e = bb->succ; e; e = e->succ_next)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && bb->succ->succ_next)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
                   e = e->succ_next)
                continue; /* Loop body has been intentionally left blank.  */

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              /* Do this for RTL only.  */
              if (!ir_type ())
                {
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
                  /* There may already be a note put there by some other
                     pass, such as the builtin_expect expander.  */
                  if (note)
                    XEXP (note, 0) = GEN_INT (prob);
                  else
                    REG_NOTES (BB_END (bb))
                      = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                           REG_NOTES (BB_END (bb)));
                }
              num_branches++;
            }
        }
      /* Otherwise distribute the probabilities evenly so we get a sane
         sum.  Use the simple heuristic that if there are normal edges,
         give all abnormals a frequency of 0, otherwise distribute the
         frequency over the abnormals (this is the case of noreturn
         calls).  */
      else
        {
          int total = 0;

          for (e = bb->succ; e; e = e->succ_next)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              for (e = bb->succ; e; e = e->succ_next)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              for (e = bb->succ; e; e = e->succ_next)
                total++;
              for (e = bb->succ; e; e = e->succ_next)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && bb->succ->succ_next)
            num_branches++, num_never_executed++;
        }
    }
  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms for N_VALUES values whose description is stored
   in VALUES array from .da file.  */
static void
compute_value_histograms (unsigned n_values, struct histogram_value *values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < n_values; i++)
    n_histogram_counters[(int) (values[i].type)] += values[i].n_counters;

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < n_values; i++)
    {
      rtx hist_list = NULL_RTX;
      t = (int) (values[i].type);

      /* FIXME: make this work for trees.  */
      if (!ir_type ())
        {
          aact_count = act_count[t];
          act_count[t] += values[i].n_counters;
          for (j = values[i].n_counters; j > 0; j--)
            hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
                                         hist_list);
          hist_list = alloc_EXPR_LIST (0,
                                       copy_rtx ((rtx)values[i].value),
                                       hist_list);
          hist_list = alloc_EXPR_LIST (0, GEN_INT (values[i].type), hist_list);
          REG_NOTES ((rtx)values[i].insn) =
            alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
                             REG_NOTES ((rtx)values[i].insn));
        }
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  unsigned n_values = 0;
  struct histogram_value *values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of an abnormal edge by a fake
     edge from the entry node and every destination by a fake edge to the
     exit.  This keeps the graph acyclic and our calculation exact for all
     normal edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      for (e = bb->succ; e; e = e->succ_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      for (e = bb->pred; e; e = e->pred_next)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
              && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }
  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }
  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
#define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
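  /* With this mapping the entry block (index -1 here) becomes gcov block 0,
     ordinary blocks 0 .. last_basic_block-1 become 1 .. last_basic_block,
     and the exit block becomes last_basic_block + 1, so every index
     written to the .gcno file is nonnegative.  */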
  /* Arcs.  */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          for (e = bb->succ; e; e = e->succ_next)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }
  /* Line numbers.  */
  /* FIXME: make this work for trees.  (Line numbers are in location_t
     objects, but aren't always attached to the obvious tree...)  */
  if (coverage_begin_output () && !ir_type ())
    {
      char const *prev_file_name = NULL;
      gcov_position_t offset;

      FOR_EACH_BB (bb)
        {
          rtx insn = BB_HEAD (bb);
          int ignore_next_note = 0;

          offset = 0;

          /* We are looking for line number notes.  Search backward
             before basic block to find correct ones.  */
          insn = prev_nonnote_insn (insn);
          if (!insn)
            insn = get_insns ();
          else
            insn = NEXT_INSN (insn);

          while (insn != BB_END (bb))
            {
              if (NOTE_P (insn))
                {
                  /* Must ignore the line number notes that
                     immediately follow the end of an inline function
                     to avoid counting it twice.  There is a note
                     before the call, and one after the call.  */
                  if (NOTE_LINE_NUMBER (insn)
                      == NOTE_INSN_REPEATED_LINE_NUMBER)
                    ignore_next_note = 1;
                  else if (NOTE_LINE_NUMBER (insn) <= 0)
                    /*NOP*/;
                  else if (ignore_next_note)
                    ignore_next_note = 0;
                  else
                    {
                      expanded_location s;

                      if (!offset)
                        {
                          offset = gcov_write_tag (GCOV_TAG_LINES);
                          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
                        }

                      NOTE_EXPANDED_LOCATION (s, insn);

                      /* If this is a new source file, then output the
                         file's name to the .bb file.  */
                      if (!prev_file_name
                          || strcmp (s.file, prev_file_name))
                        {
                          prev_file_name = s.file;
                          gcov_write_unsigned (0);
                          gcov_write_string (prev_file_name);
                        }
                      gcov_write_unsigned (s.line);
                    }
                }
              insn = NEXT_INSN (insn);
            }

          if (offset)
            {
              /* A file of NULL indicates the end of run.  */
              gcov_write_unsigned (0);
              gcov_write_string (NULL);
              gcov_write_length (offset);
            }
        }
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    find_values_to_profile (&n_values, &values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (n_values, values);
    }

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented = instrument_edges (el);

      if (n_instrumented != num_instrumented)
        abort ();

      if (flag_profile_values)
        instrument_values (n_values, values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
        bsi_commit_edge_inserts ((int *)NULL);
      else
        {
          commit_edge_insertions_watch_calls ();
          allocate_reg_info (max_reg_num (), FALSE, FALSE);
        }
    }

  remove_fake_edges ();
  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
         insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file ())
        dump_flow_info (profile_dump_file ());
    }

  free_edge_list (el);
}
/* Union find algorithm implementation for the basic blocks using
   aux fields.  */
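/* Each block's aux pointer names its parent in the union-find forest; a
   block whose aux points to itself is the representative of its group.
   find_group chases parents up to the representative and then compresses
   the path, while union_groups simply links one representative under the
   other (no union by rank, as noted below), which is plenty fast for
   CFG-sized inputs.  */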
static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  if (bb1g == bb2g)
    abort ();

  bb1g->aux = bb2g;
}

/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges on the tree, since they
   are more expensive to instrument.  */
static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add fake edge exit to entry we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5 * i, 5 * i + 5);
        }
    }
}
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  profile_hooks = &tree_profile_hooks;
  if (!ir_type ())
    abort ();
}

/* Set up hooks to enable RTL-based profiling.  */

void
rtl_register_profile_hooks (void)
{
  profile_hooks = &rtl_profile_hooks;
  if (ir_type ())
    abort ();
}