/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   minimal BB spanning tree; only edges that are not on the spanning
   tree (plus the entry point) need instrumenting.  From that
   information all other edge counts can be deduced.  By construction
   all fake edges must be on the spanning tree.  We also attempt to
   place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary file generated is <dumpbase>.bbg.  The format is
   described in full in gcov-io.h.  */
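
/* As a concrete illustration (an example, not lifted from the sources):
   for the diamond-shaped graph of a simple if/else,

	ENTRY -> B0 -> {B1, B2} -> B3 -> EXIT

   one possible spanning tree consists of ENTRY->B0, B0->B1, B0->B2 and
   B3->EXIT (plus the fake exit-to-entry connection), so only the two
   remaining edges B1->B3 and B2->B3 receive counters.  Every other count
   then follows by flow conservation: count(B3) is the sum of the two
   counters, count(B3->EXIT) equals count(B3), count(B0->B1) equals
   count(B1->B3), and so on -- two counters instead of six.  */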
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
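
/* The aux fields that EDGE_INFO and BB_INFO cast are allocated with
   alloc_aux_for_edges / alloc_aux_for_blocks (in branch_prob and
   compute_branch_probabilities below) and released again with
   free_aux_for_edges / free_aux_for_blocks.  */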
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree PARAMS ((struct edge_list *));
static rtx gen_edge_profiler PARAMS ((int));
static unsigned instrument_edges PARAMS ((struct edge_list *));
static void compute_branch_probabilities PARAMS ((void));
static gcov_type * get_exec_counts PARAMS ((void));
static basic_block find_group PARAMS ((basic_block));
static void union_groups PARAMS ((basic_block, basic_block));
/* Add edge instrumentation code to the entire insn chain.

   EL is the list of edges of the flow graph; every edge that is neither
   ignored nor on the spanning tree gets a profiling counter.  */

static unsigned
instrument_edges (el)
     struct edge_list *el;
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  remove_fake_edges ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  struct edge_info *inf = EDGE_INFO (e);

	  if (!inf->ignore && !inf->on_tree)
	    {
	      rtx edge_profile;

	      if (e->flags & EDGE_ABNORMAL)
		abort ();
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
	      edge_profile = gen_edge_profiler (num_instr_edges++);
	      insert_insn_on_edge (edge_profile, e);
	      rebuild_jump_labels (e->insns);
	    }
	}
    }

  total_num_blocks_created += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Computes hybrid profile for all matching entries in da_file.
   Sets profile_info as a side effect.  */

static gcov_type *
get_exec_counts ()
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (rtl_dump_file && profile_info)
    fprintf (rtl_dump_file, "Merged %u profiles with maximal count %u.\n",
	     profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities ()
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->succ_count++;
      for (e = bb->pred; e; e = e->pred_next)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
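  /* The value 2 is a dummy: it keeps the exit block's succ_count and the
     entry block's pred_count from ever reaching 0 or 1, so the solver
     below never tries to derive a count for these blocks from that
     (nonexistent) side of the graph.  */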

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  {
	    num_edges++;
	    if (exec_counts)
	      {
		e->count = exec_counts[exec_counts_pos++];
	      }
	    else
	      e->count = 0;

	    EDGE_INFO (e)->count_valid = 1;
	    BB_INFO (bb)->succ_count--;
	    BB_INFO (e->dest)->pred_count--;
	    if (rtl_dump_file)
	      {
		fprintf (rtl_dump_file, "\nRead edge from %i to %i, count:",
			 bb->index, e->dest->index);
		fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
			 (HOST_WIDEST_INT) e->count);
	      }
	  }
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge; that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
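
  /* For example, if the two instrumented edges entering a join block read
     30 and 70, the block's count becomes 100 by summation; its single
     outgoing edge must then also be 100, and that value in turn feeds the
     next block, until every edge count is known.  */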

  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
	{
	  struct bb_info *bi = BB_INFO (bb);
	  if (! bi->count_valid)
	    {
	      if (bi->succ_count == 0)
		{
		  edge e;
		  gcov_type total = 0;

		  for (e = bb->succ; e; e = e->succ_next)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	      else if (bi->pred_count == 0)
		{
		  edge e;
		  gcov_type total = 0;

		  for (e = bb->pred; e; e = e->pred_next)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	    }
	  if (bi->count_valid)
	    {
	      if (bi->succ_count == 1)
		{
		  edge e;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  for (e = bb->succ; e; e = e->succ_next)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  for (e = bb->succ; e; e = e->succ_next)
		    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
		      break;

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total;

		  if (! e)
		    abort ();
		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->succ_count--;

		  BB_INFO (e->dest)->pred_count--;
		  changes = 1;
		}
	      if (bi->pred_count == 1)
		{
		  edge e;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  for (e = bb->pred; e; e = e->pred_next)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  for (e = bb->pred; e; e = e->pred_next)
		    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
		      break;

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total + e->count;

		  if (! e)
		    abort ();
		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->pred_count--;

		  BB_INFO (e->src)->succ_count--;
		  changes = 1;
		}
	    }
	}
    }
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  total_num_passes += passes;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
	abort ();
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      rtx note;

      if (bb->count < 0)
	{
	  error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
		 bb->index, (int) bb->count);
	  bb->count = 0;
	}
      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Function may return twice in the case the called function is
	     setjmp or calls fork, but we can't represent this by extra
	     edge from the entry, since extra edge from the exit is
	     already present.  We get negative frequency from the entry
	     point.  */
	  if ((e->count < 0
	       && e->dest == EXIT_BLOCK_PTR)
	      || (e->count > bb->count
		  && e->dest != EXIT_BLOCK_PTR))
	    {
	      rtx insn = bb->end;

	      while (GET_CODE (insn) != CALL_INSN
		     && insn != bb->head
		     && keep_with_call_p (insn))
		insn = PREV_INSN (insn);
	      if (GET_CODE (insn) == CALL_INSN)
		e->count = e->count < 0 ? 0 : bb->count;
	    }
	  if (e->count < 0 || e->count > bb->count)
	    {
	      error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
		     e->src->index, e->dest->index,
		     (int) e->count);
	      e->count = bb->count / 2;
	    }
	}
      if (bb->count)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
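	  /* REG_BR_PROB_BASE is 10000, so an edge taken 30 times out of a
	     block executed 100 times gets probability
	     (30 * 10000 + 50) / 100 = 3000, i.e. 30%; the "+ bb->count / 2"
	     term rounds to nearest instead of truncating.  */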
	  if (bb->index >= 0
	      && any_condjump_p (bb->end)
	      && bb->succ->succ_next)
	    {
	      int prob;
	      edge e;
	      int index;

	      /* Find the branch edge.  It is possible that we do have fake
		 edges here.  */
	      for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
		   e = e->succ_next)
		continue; /* Loop body has been intentionally left blank.  */

	      prob = e->probability;
	      index = prob * 20 / REG_BR_PROB_BASE;

	      if (index == 20)
		index = 19;
	      hist_br_prob[index]++;

	      note = find_reg_note (bb->end, REG_BR_PROB, 0);
	      /* There may already be a note put by some other pass, such
		 as the builtin_expect expander.  */
	      if (note)
		XEXP (note, 0) = GEN_INT (prob);
	      else
		REG_NOTES (bb->end)
		  = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
				       REG_NOTES (bb->end));
	      num_branches++;
	    }
	}
      /* Otherwise distribute the probabilities evenly so we get sane
	 sum.  Use simple heuristics that if there are normal edges,
	 give all abnormals frequency of 0, otherwise distribute the
	 frequency over abnormals (this is the case of noreturn
	 calls).  */
      else
	{
	  int total = 0;

	  for (e = bb->succ; e; e = e->succ_next)
	    if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
	      total++;
	  if (total)
	    {
	      for (e = bb->succ; e; e = e->succ_next)
		if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
		  e->probability = REG_BR_PROB_BASE / total;
		else
		  e->probability = 0;
	    }
	  else
	    {
	      for (e = bb->succ; e; e = e->succ_next)
		total++;
	      for (e = bb->succ; e; e = e->succ_next)
		e->probability = REG_BR_PROB_BASE / total;
	    }
	  if (bb->index >= 0
	      && any_condjump_p (bb->end)
	      && bb->succ->succ_next)
	    num_branches++, num_never_executed++;
	}
    }

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "%d branches\n", num_branches);
      fprintf (rtl_dump_file, "%d branches never executed\n",
	       num_never_executed);
      if (num_branches)
	for (i = 0; i < 10; i++)
	  fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
		   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
		   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
	total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', rtl_dump_file);
      fputc ('\n', rtl_dump_file);
    }

  free_aux_for_blocks ();
}

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob ()
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;

      /* Functions returning multiple times are not handled by extra edges.
	 Instead we simply allow negative counts on edges from exit to the
	 block past call and corresponding probabilities.  We can't go
	 with the extra edges because that would result in flowgraph that
	 needs to have fake edges outside the spanning tree.  */

      for (e = bb->succ; e; e = e->succ_next)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->dest != EXIT_BLOCK_PTR)
	    need_exit_edge = 1;
	  if (e->dest == EXIT_BLOCK_PTR)
	    have_exit_edge = 1;
	}
      for (e = bb->pred; e; e = e->pred_next)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->src != ENTRY_BLOCK_PTR)
	    need_entry_edge = 1;
	  if (e->src == ENTRY_BLOCK_PTR)
	    have_entry_edge = 1;
	}

      if (need_exit_edge && !have_exit_edge)
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Adding fake exit edge to bb %i\n",
		     bb->index);
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	}
      if (need_entry_edge && !have_entry_edge)
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Adding fake entry edge to bb %i\n",
		     bb->index);
	  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
	}
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
	gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)
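  /* With the reindexing above, BB_TO_GCOV_INDEX maps the entry block
     (index -1) to gcov index 0, the ordinary blocks to 1 .. last_basic_block,
     and the exit block (index last_basic_block) to last_basic_block + 1.  */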

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	{
	  edge e;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      struct edge_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
		  gcov_write_unsigned (flag_bits);
		}
	    }

	  gcov_write_length (offset);
	}
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      char const *prev_file_name = NULL;
      gcov_position_t offset;

      FOR_EACH_BB (bb)
	{
	  rtx insn = bb->head;
	  int ignore_next_note = 0;

	  offset = 0;

	  /* We are looking for line number notes.  Search backward from
	     the start of the basic block to find the correct ones.  */
	  insn = prev_nonnote_insn (insn);
	  if (!insn)
	    insn = get_insns ();
	  else
	    insn = NEXT_INSN (insn);

	  while (insn != bb->end)
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  /* Must ignore the line number notes that
		     immediately follow the end of an inline function
		     to avoid counting it twice.  There is a note
		     before the call, and one after the call.  */
		  if (NOTE_LINE_NUMBER (insn)
		      == NOTE_INSN_REPEATED_LINE_NUMBER)
		    ignore_next_note = 1;
		  else if (NOTE_LINE_NUMBER (insn) <= 0)
		    /*NOP*/;
		  else if (ignore_next_note)
		    ignore_next_note = 0;
		  else
		    {
		      if (!offset)
			{
			  offset = gcov_write_tag (GCOV_TAG_LINES);
			  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
			}

		      /* If this is a new source file, then output the
			 file's name to the .bb file.  */
		      if (!prev_file_name
			  || strcmp (NOTE_SOURCE_FILE (insn),
				     prev_file_name))
			{
			  prev_file_name = NOTE_SOURCE_FILE (insn);
			  gcov_write_unsigned (0);
			  gcov_write_string (prev_file_name);
			}
		      gcov_write_unsigned (NOTE_LINE_NUMBER (insn));
		    }
		}
	      insn = NEXT_INSN (insn);
	    }

	  if (offset)
	    {
	      /* A file of NULL indicates the end of run.  */
	      gcov_write_unsigned (0);
	      gcov_write_string (NULL);
	      gcov_write_length (offset);
	    }
	}
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_branch_probabilities)
    compute_branch_probabilities ();

  /* For each edge not on the spanning tree, add counting code as rtl.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented = instrument_edges (el);

      if (n_instrumented != num_instrumented)
	abort ();

      /* Commit changes done by instrumentation.  */
      commit_edge_insertions_watch_calls ();
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  remove_fake_edges ();
  free_aux_for_edges ();
  /* Re-merge split basic blocks and the mess introduced by
     insert_insn_on_edge.  */
  cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  free_edge_list (el);
}

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (bb)
     basic_block bb;
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (bb1, bb2)
     basic_block bb1, bb2;
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */

  if (bb1g == bb2g)
    abort ();

  bb1g->aux = bb2g;
}

/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges on the tree, since they
   are more expensive to instrument.  */

static void
find_spanning_tree (el)
     struct edge_list *el;
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add the fake edge from exit to entry, which we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     the setting of the return value from the function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
	   || e->dest == EXIT_BLOCK_PTR)
	  && !EDGE_INFO (e)->ignore
	  && (find_group (e->src) != find_group (e->dest)))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Abnormal edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Critical edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Normal edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}

/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob ()
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob ()
{
  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\n");
      fprintf (rtl_dump_file, "Total number of blocks: %d\n",
	       total_num_blocks);
      fprintf (rtl_dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (rtl_dump_file, "Total number of ignored edges: %d\n",
	       total_num_edges_ignored);
      fprintf (rtl_dump_file, "Total number of instrumented edges: %d\n",
	       total_num_edges_instrumented);
      fprintf (rtl_dump_file, "Total number of blocks created: %d\n",
	       total_num_blocks_created);
      fprintf (rtl_dump_file, "Total number of graph solution passes: %d\n",
	       total_num_passes);
      if (total_num_times_called != 0)
	fprintf (rtl_dump_file, "Average number of graph solution passes: %d\n",
		 (total_num_passes + (total_num_times_called >> 1))
		 / total_num_times_called);
      fprintf (rtl_dump_file, "Total number of branches: %d\n",
	       total_num_branches);
      fprintf (rtl_dump_file, "Total number of branches never executed: %d\n",
	       total_num_never_executed);
      if (total_num_branches)
	{
	  int i;

	  for (i = 0; i < 10; i++)
	    fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
		     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
		     / total_num_branches, 5 * i, 5 * i + 5);
	}
    }
}

/* Output instructions as RTL to increment the edge execution count.  */

static rtx
gen_edge_profiler (edgeno)
     int edgeno;
{
  rtx ref = coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  rtx tmp;
  enum machine_mode mode = GET_MODE (ref);
  rtx sequence;

  start_sequence ();
  ref = validize_mem (ref);

  tmp = expand_simple_binop (mode, PLUS, ref, const1_rtx,
			     ref, 0, OPTAB_WIDEN);
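
  /* expand_simple_binop was asked to add 1 directly into the counter's
     memory slot (REF is both operand and target); if the target could not
     do that and the result landed elsewhere, copy it back below.  */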
  if (tmp != ref)
    emit_move_insn (copy_rtx (ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  return sequence;
}