/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path on which to place the code).  We close the graph by inserting
   fake EDGE_FAKE edges to the EXIT_BLOCK from the sources of abnormal
   edges that do not go to the exit block, and we ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate a
   minimal spanning tree of the BB graph; only the edges that are not
   on the spanning tree (plus the entry point) need instrumenting.
   From that information all other edge counts can be deduced.  By
   construction all fake edges must be on the spanning tree.  We also
   attempt to place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary file generated is <dumpbase>.bbg.  The format is
   described in full in gcov-io.h.  */
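
/* As a small illustration of the optimization (not taken from any
   particular function): in the diamond-shaped subgraph

	A -> B,  A -> C,  B -> D,  C -> D

   knowing count(A) and count(A->B) is enough, since flow conservation
   gives count(A->C) = count(A) - count(A->B), count(B->D) = count(B)
   = count(A->B), and count(D) = count(B->D) + count(C->D).  */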
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;
/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static rtx gen_edge_profiler (int);
static rtx gen_interval_profiler (struct histogram_value *, unsigned,
				  unsigned);
static rtx gen_pow2_profiler (struct histogram_value *, unsigned, unsigned);
static rtx gen_one_value_profiler (struct histogram_value *, unsigned,
				   unsigned);
static rtx gen_const_delta_profiler (struct histogram_value *, unsigned,
				     unsigned);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (unsigned, struct histogram_value *);
static void compute_branch_probabilities (void);
static void compute_value_histograms (unsigned, struct histogram_value *);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
/* Instrument every edge in the edge list EL that is neither ignored nor
   on the spanning tree.  Return the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  remove_fake_edges ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  struct edge_info *inf = EDGE_INFO (e);

	  if (!inf->ignore && !inf->on_tree)
	    {
	      rtx edge_profile;

	      if (e->flags & EDGE_ABNORMAL)
		abort ();
	      if (rtl_dump_file)
		fprintf (rtl_dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
	      edge_profile = gen_edge_profiler (num_instr_edges++);
	      insert_insn_on_edge (edge_profile, e);
	      rebuild_jump_labels (e->insns);
	    }
	}
    }

  total_num_blocks_created += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
/* Add code to measure histograms for the list VALUES of length N_VALUES.  */
static void
instrument_values (unsigned n_values, struct histogram_value *values)
{
  rtx sequence;
  unsigned i, t;
  edge e;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < n_values; i++)
    {
      e = split_block (BLOCK_FOR_INSN (values[i].insn),
		       PREV_INSN (values[i].insn));
      switch (values[i].type)
	{
	case HIST_TYPE_INTERVAL:
	  t = GCOV_COUNTER_V_INTERVAL;
	  break;

	case HIST_TYPE_POW2:
	  t = GCOV_COUNTER_V_POW2;
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  t = GCOV_COUNTER_V_SINGLE;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  t = GCOV_COUNTER_V_DELTA;
	  break;

	default:
	  abort ();
	}
      if (!coverage_counter_alloc (t, values[i].n_counters))
	continue;

      switch (values[i].type)
	{
	case HIST_TYPE_INTERVAL:
	  sequence = gen_interval_profiler (values + i, t, 0);
	  break;

	case HIST_TYPE_POW2:
	  sequence = gen_pow2_profiler (values + i, t, 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  sequence = gen_one_value_profiler (values + i, t, 0);
	  break;

	case HIST_TYPE_CONST_DELTA:
	  sequence = gen_const_delta_profiler (values + i, t, 0);
	  break;

	default:
	  abort ();
	}

      safe_insert_insn_on_edge (sequence, e);
    }
}
/* Compute the hybrid profile for all matching entries in the .da file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (rtl_dump_file && profile_info)
    fprintf (rtl_dump_file, "Merged %u profiles with maximal count %u.\n",
	     profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->succ_count++;
      for (e = bb->pred; e; e = e->pred_next)
	if (!EDGE_INFO (e)->ignore)
	  BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  {
	    num_edges++;
	    if (exec_counts)
	      e->count = exec_counts[exec_counts_pos++];
	    else
	      e->count = 0;

	    EDGE_INFO (e)->count_valid = 1;
	    BB_INFO (bb)->succ_count--;
	    BB_INFO (e->dest)->pred_count--;
	    if (rtl_dump_file)
	      {
		fprintf (rtl_dump_file, "\nRead edge from %i to %i, count:",
			 bb->index, e->dest->index);
		fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC,
			 (HOST_WIDEST_INT) e->count);
	      }
	  }
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "\n%d edge counts read\n", num_edges);
  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count;
     - if the block count is known, and every exit/entrance edge but one has
       a known execution count, then set the count of the remaining edge.

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge; that way we can easily tell when all edges are known, or when
     only one edge is unknown.  */

  /* The order in which the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */
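
  /* Illustrative example (not tied to any particular function): suppose
     block B is known to execute 100 times, its successor edge B->X has a
     known count of 70, and B->Y is the only unknown successor edge.  Flow
     conservation forces count(B->Y) = 100 - 70 = 30; setting it may in
     turn make X or Y solvable on the next pass.  */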
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
	{
	  struct bb_info *bi = BB_INFO (bb);
	  if (! bi->count_valid)
	    {
	      if (bi->succ_count == 0)
		{
		  edge e;
		  gcov_type total = 0;

		  for (e = bb->succ; e; e = e->succ_next)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	      else if (bi->pred_count == 0)
		{
		  edge e;
		  gcov_type total = 0;

		  for (e = bb->pred; e; e = e->pred_next)
		    total += e->count;
		  bb->count = total;
		  bi->count_valid = 1;
		  changes = 1;
		}
	    }
	  if (bi->count_valid)
	    {
	      if (bi->succ_count == 1)
		{
		  edge e;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  for (e = bb->succ; e; e = e->succ_next)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  for (e = bb->succ; e; e = e->succ_next)
		    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
		      break;

		  if (! e)
		    abort ();

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total;

		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->succ_count--;

		  BB_INFO (e->dest)->pred_count--;
		  changes = 1;
		}
	      if (bi->pred_count == 1)
		{
		  edge e;
		  gcov_type total = 0;

		  /* One of the counts will be invalid, but it is zero,
		     so adding it in also doesn't hurt.  */
		  for (e = bb->pred; e; e = e->pred_next)
		    total += e->count;

		  /* Search for the invalid edge, and set its count.  */
		  for (e = bb->pred; e; e = e->pred_next)
		    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
		      break;

		  if (! e)
		    abort ();

		  /* Calculate count for remaining edge by conservation.  */
		  total = bb->count - total + e->count;

		  EDGE_INFO (e)->count_valid = 1;
		  e->count = total;
		  bi->pred_count--;

		  BB_INFO (e->src)->succ_count--;
		  changes = 1;
		}
	    }
	}
    }
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  total_num_passes += passes;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
	abort ();
    }
  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      rtx note;

      if (bb->count < 0)
	{
	  error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
		 bb->index, (int) bb->count);
	  bb->count = 0;
	}
      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* The function may return twice in case the called function is
	     setjmp or calls fork, but we can't represent this by an extra
	     edge from the entry, since an extra edge from the exit is
	     already present.  We get a negative frequency from the entry
	     point.  */
	  if ((e->count < 0
	       && e->dest == EXIT_BLOCK_PTR)
	      || (e->count > bb->count
		  && e->dest != EXIT_BLOCK_PTR))
	    {
	      rtx insn = bb->end;

	      while (GET_CODE (insn) != CALL_INSN
		     && insn != bb->head
		     && keep_with_call_p (insn))
		insn = PREV_INSN (insn);
	      if (GET_CODE (insn) == CALL_INSN)
		e->count = e->count < 0 ? 0 : bb->count;
	    }
	  if (e->count < 0 || e->count > bb->count)
	    {
	      error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
		     e->src->index, e->dest->index,
		     (int) e->count);
	      e->count = bb->count / 2;
	    }
	}
      if (bb->count)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->probability = (e->count * REG_BR_PROB_BASE
			      + bb->count / 2) / bb->count;
	  if (bb->index >= 0
	      && any_condjump_p (bb->end)
	      && bb->succ->succ_next)
	    {
	      int prob;
	      edge e;
	      int index;

	      /* Find the branch edge.  It is possible that we do have fake
		 edges here.  */
	      for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
		   e = e->succ_next)
		continue; /* Loop body has been intentionally left blank.  */

	      prob = e->probability;
	      index = prob * 20 / REG_BR_PROB_BASE;

	      if (index == 20)
		index = 19;
	      hist_br_prob[index]++;

	      note = find_reg_note (bb->end, REG_BR_PROB, 0);
	      /* There may already be a note put there by some other pass,
		 such as the builtin_expect expander.  */
	      if (note)
		XEXP (note, 0) = GEN_INT (prob);
	      else
		REG_NOTES (bb->end)
		  = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
				       REG_NOTES (bb->end));
	      num_branches++;
	    }
	}
      /* Otherwise distribute the probabilities evenly so we get a sane
	 sum.  Use the simple heuristic that if there are normal edges,
	 give all abnormals a frequency of 0, otherwise distribute the
	 frequency over the abnormals (this is the case of noreturn
	 calls).  */
      else
	{
	  int total = 0;

	  for (e = bb->succ; e; e = e->succ_next)
	    if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
	      total++;
	  if (total)
	    {
	      for (e = bb->succ; e; e = e->succ_next)
		if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
		  e->probability = REG_BR_PROB_BASE / total;
		else
		  e->probability = 0;
	    }
	  else
	    {
	      for (e = bb->succ; e; e = e->succ_next)
		total++;
	      for (e = bb->succ; e; e = e->succ_next)
		e->probability = REG_BR_PROB_BASE / total;
	    }
	  if (bb->index >= 0
	      && any_condjump_p (bb->end)
	      && bb->succ->succ_next)
	    num_branches++, num_never_executed++;
	}
    }

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "%d branches\n", num_branches);
      fprintf (rtl_dump_file, "%d branches never executed\n",
	       num_never_executed);
      if (num_branches)
	for (i = 0; i < 10; i++)
	  fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
		   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
		   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
	total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', rtl_dump_file);
      fputc ('\n', rtl_dump_file);
    }

  free_aux_for_blocks ();
}
/* Load value histograms from the .da file for the N_VALUES values whose
   descriptions are stored in the array VALUES.  */
static void
compute_value_histograms (unsigned n_values, struct histogram_value *values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < n_values; i++)
    n_histogram_counters[(int) (values[i].type)] += values[i].n_counters;

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
	{
	  histogram_counts[t] = NULL;
	  continue;
	}

      histogram_counts[t] =
	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
			     n_histogram_counters[t], NULL);
      if (histogram_counts[t])
	any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < n_values; i++)
    {
      rtx hist_list = NULL_RTX;
      t = (int) (values[i].type);

      aact_count = act_count[t];
      act_count[t] += values[i].n_counters;
      for (j = values[i].n_counters; j > 0; j--)
	hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]), hist_list);
      hist_list = alloc_EXPR_LIST (0, copy_rtx (values[i].value), hist_list);
      hist_list = alloc_EXPR_LIST (0, GEN_INT (values[i].type), hist_list);
      REG_NOTES (values[i].insn) =
	alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
			 REG_NOTES (values[i].insn));
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */
void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  unsigned n_values = 0;
  struct histogram_value *values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();
  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of an abnormal edge by a fake
     edge from the entry node, and every destination by a fake edge to the
     exit node.  This keeps the graph acyclic and our calculation exact for
     all normal edges except for exit and entrance ones.

     We also add a fake exit edge for each call and asm statement in a
     basic block, since it may not return.  */
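
  /* For example, if a block B ends in a call that may longjmp away, B is
     the source of an abnormal edge; B then gets a fake edge B->EXIT, the
     abnormal destination gets a fake edge from ENTRY, and the abnormal
     edge itself is ignored for counting purposes.  */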
  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;

      /* Functions returning multiple times are not handled by extra edges.
	 Instead we simply allow negative counts on edges from exit to the
	 block past the call, and corresponding probabilities.  We can't go
	 with the extra edges because that would result in a flowgraph that
	 needs to have fake edges outside the spanning tree.  */

      for (e = bb->succ; e; e = e->succ_next)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->dest != EXIT_BLOCK_PTR)
	    need_exit_edge = 1;
	  if (e->dest == EXIT_BLOCK_PTR)
	    have_exit_edge = 1;
	}
      for (e = bb->pred; e; e = e->pred_next)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->src != ENTRY_BLOCK_PTR)
	    need_entry_edge = 1;
	  if (e->src == ENTRY_BLOCK_PTR)
	    have_entry_edge = 1;
	}

      if (need_exit_edge && !have_exit_edge)
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Adding fake exit edge to bb %i\n",
		     bb->index);
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	}
      if (need_entry_edge && !have_entry_edge)
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Adding fake entry edge to bb %i\n",
		     bb->index);
	  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
	}
    }
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }
  total_num_blocks += n_basic_blocks + 2;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
	gcov_write_unsigned (0);
      gcov_write_length (offset);
    }
  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.  */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;
#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)
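
  /* With this mapping the entry block (index -1) becomes gcov index 0,
     ordinary blocks 0 .. last_basic_block - 1 become 1 .. last_basic_block,
     and the exit block becomes gcov index last_basic_block + 1.  */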
  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	{
	  edge e;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      struct edge_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
		  gcov_write_unsigned (flag_bits);
		}
	    }

	  gcov_write_length (offset);
	}
    }
  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      char const *prev_file_name = NULL;
      gcov_position_t offset;

      FOR_EACH_BB (bb)
	{
	  rtx insn = bb->head;
	  int ignore_next_note = 0;

	  offset = 0;

	  /* We are looking for line number notes.  Search backward
	     before the basic block to find the correct ones.  */
	  insn = prev_nonnote_insn (insn);
	  if (!insn)
	    insn = get_insns ();
	  else
	    insn = NEXT_INSN (insn);

	  while (insn != bb->end)
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  /* Must ignore the line number notes that
		     immediately follow the end of an inline function
		     to avoid counting it twice.  There is a note
		     before the call, and one after the call.  */
		  if (NOTE_LINE_NUMBER (insn)
		      == NOTE_INSN_REPEATED_LINE_NUMBER)
		    ignore_next_note = 1;
		  else if (NOTE_LINE_NUMBER (insn) <= 0)
		    /*NOP*/;
		  else if (ignore_next_note)
		    ignore_next_note = 0;
		  else
		    {
		      if (!offset)
			{
			  offset = gcov_write_tag (GCOV_TAG_LINES);
			  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
			}

		      /* If this is a new source file, then output the
			 file's name to the .bb file.  */
		      if (!prev_file_name
			  || strcmp (NOTE_SOURCE_FILE (insn),
				     prev_file_name))
			{
			  prev_file_name = NOTE_SOURCE_FILE (insn);
			  gcov_write_unsigned (0);
			  gcov_write_string (prev_file_name);
			}
		      gcov_write_unsigned (NOTE_LINE_NUMBER (insn));
		    }
		}
	      insn = NEXT_INSN (insn);
	    }

	  if (offset)
	    {
	      /* A file of NULL indicates the end of run.  */
	      gcov_write_unsigned (0);
	      gcov_write_string (NULL);
	      gcov_write_length (offset);
	    }
	}
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX
  if (flag_profile_values)
    {
      life_analysis (get_insns (), NULL, PROP_DEATH_NOTES);
      find_values_to_profile (&n_values, &values);
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
	compute_value_histograms (n_values, values);
    }

  /* For each edge not on the spanning tree, add counting code as rtl.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented = instrument_edges (el);

      if (n_instrumented != num_instrumented)
	abort ();

      if (flag_profile_values)
	instrument_values (n_values, values);

      /* Commit changes done by instrumentation.  */
      commit_edge_insertions_watch_calls ();
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  if (flag_profile_values)
    count_or_remove_death_notes (NULL, 1);
  remove_fake_edges ();
  free_aux_for_edges ();
  /* Re-merge split basic blocks and the mess introduced by
     insert_insn_on_edge.  */
  cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  free_edge_list (el);
}
/* Union-find algorithm implementation for the basic blocks, using the
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? We don't have a place for the rank field, so go without it;
     this code is unlikely to be a performance problem anyway.  */
  if (bb1g == bb2g)
    abort ();

  bb1g->aux = bb2g;
}
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges onto the tree, since
   they are more expensive to instrument.  */
static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use the aux field for the standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add the fake edge from exit to entry, which we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     the setting of the function's return value.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
	   || e->dest == EXIT_BLOCK_PTR)
	  && !EDGE_INFO (e)->ignore
	  && (find_group (e->src) != find_group (e->dest)))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Abnormal edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Critical edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
	  && find_group (e->src) != find_group (e->dest))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file, "Normal edge %d to %d put to tree\n",
		     e->src->index, e->dest->index);
	  EDGE_INFO (e)->on_tree = 1;
	  union_groups (e->src, e->dest);
	}
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}
/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}
/* Perform file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "\n");
      fprintf (rtl_dump_file, "Total number of blocks: %d\n",
	       total_num_blocks);
      fprintf (rtl_dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (rtl_dump_file, "Total number of ignored edges: %d\n",
	       total_num_edges_ignored);
      fprintf (rtl_dump_file, "Total number of instrumented edges: %d\n",
	       total_num_edges_instrumented);
      fprintf (rtl_dump_file, "Total number of blocks created: %d\n",
	       total_num_blocks_created);
      fprintf (rtl_dump_file, "Total number of graph solution passes: %d\n",
	       total_num_passes);
      if (total_num_times_called != 0)
	fprintf (rtl_dump_file, "Average number of graph solution passes: %d\n",
		 (total_num_passes + (total_num_times_called >> 1))
		 / total_num_times_called);
      fprintf (rtl_dump_file, "Total number of branches: %d\n",
	       total_num_branches);
      fprintf (rtl_dump_file, "Total number of branches never executed: %d\n",
	       total_num_never_executed);
      if (total_num_branches)
	{
	  int i;

	  for (i = 0; i < 10; i++)
	    fprintf (rtl_dump_file, "%d%% branches in range %d-%d%%\n",
		     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
		     / total_num_branches, 5 * i, 5 * i + 5);
	}
    }
}
/* Output instructions as RTL to increment the edge execution count.  */

static rtx
gen_edge_profiler (int edgeno)
{
  rtx ref = coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  rtx tmp;
  enum machine_mode mode = GET_MODE (ref);
  rtx sequence;

  start_sequence ();
  ref = validize_mem (ref);

  tmp = expand_simple_binop (mode, PLUS, ref, const1_rtx,
			     ref, 0, OPTAB_WIDEN);

  if (tmp != ref)
    emit_move_insn (copy_rtx (ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  return sequence;
}
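
/* Conceptually, the sequence built above is just "counter[EDGENO]++" on
   the coverage counter returned by coverage_counter_ref, with the add
   widened via OPTAB_WIDEN when the target lacks an add at that mode.  */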
/* Output instructions as RTL to increment the interval histogram counter.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

static rtx
gen_interval_profiler (struct histogram_value *value, unsigned tag,
		       unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx mem_ref, tmp, tmp1, mr, val;
  rtx sequence;
  rtx more_label = gen_label_rtx ();
  rtx less_label = gen_label_rtx ();
  rtx end_of_code_label = gen_label_rtx ();
  int per_counter = gcov_size / BITS_PER_UNIT;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  mr = gen_reg_rtx (Pmode);

  tmp = coverage_counter_ref (tag, base);
  tmp = force_reg (Pmode, XEXP (tmp, 0));

  val = expand_simple_binop (value->mode, MINUS,
			     copy_rtx (value->value),
			     GEN_INT (value->hdata.intvl.int_start),
			     NULL_RTX, 0, OPTAB_WIDEN);

  if (value->hdata.intvl.may_be_more)
    do_compare_rtx_and_jump (copy_rtx (val),
			     GEN_INT (value->hdata.intvl.steps),
			     GE, 0, value->mode, NULL_RTX, NULL_RTX,
			     more_label);
  if (value->hdata.intvl.may_be_less)
    do_compare_rtx_and_jump (copy_rtx (val), const0_rtx, LT, 0, value->mode,
			     NULL_RTX, NULL_RTX, less_label);

  /* We are in range.  */
  tmp1 = expand_simple_binop (value->mode, MULT,
			      copy_rtx (val), GEN_INT (per_counter),
			      NULL_RTX, 0, OPTAB_WIDEN);
  tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp), tmp1, mr,
			      0, OPTAB_WIDEN);
  if (tmp1 != mr)
    emit_move_insn (copy_rtx (mr), tmp1);

  if (value->hdata.intvl.may_be_more
      || value->hdata.intvl.may_be_less)
    {
      emit_jump_insn (gen_jump (end_of_code_label));
      emit_barrier ();
    }

  /* Above the interval.  */
  if (value->hdata.intvl.may_be_more)
    {
      emit_label (more_label);
      tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
				  GEN_INT (per_counter
					   * value->hdata.intvl.steps),
				  mr, 0, OPTAB_WIDEN);
      if (tmp1 != mr)
	emit_move_insn (copy_rtx (mr), tmp1);
      if (value->hdata.intvl.may_be_less)
	{
	  emit_jump_insn (gen_jump (end_of_code_label));
	  emit_barrier ();
	}
    }

  /* Below the interval.  */
  if (value->hdata.intvl.may_be_less)
    {
      emit_label (less_label);
      tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
		GEN_INT (per_counter
			 * (value->hdata.intvl.steps
			    + (value->hdata.intvl.may_be_more ? 1 : 0))),
		mr, 0, OPTAB_WIDEN);
      if (tmp1 != mr)
	emit_move_insn (copy_rtx (mr), tmp1);
    }

  if (value->hdata.intvl.may_be_more
      || value->hdata.intvl.may_be_less)
    emit_label (end_of_code_label);

  mem_ref = validize_mem (gen_rtx_MEM (mode, mr));

  tmp = expand_simple_binop (mode, PLUS, copy_rtx (mem_ref), const1_rtx,
			     mem_ref, 0, OPTAB_WIDEN);

  if (tmp != mem_ref)
    emit_move_insn (copy_rtx (mem_ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL to increment the power of two histogram counter.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

static rtx
gen_pow2_profiler (struct histogram_value *value, unsigned tag, unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx mem_ref, tmp, mr, uval;
  rtx sequence;
  rtx end_of_code_label = gen_label_rtx ();
  rtx loop_label = gen_label_rtx ();
  int per_counter = gcov_size / BITS_PER_UNIT;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  mr = gen_reg_rtx (Pmode);
  tmp = coverage_counter_ref (tag, base);
  tmp = force_reg (Pmode, XEXP (tmp, 0));
  emit_move_insn (mr, tmp);

  uval = gen_reg_rtx (value->mode);
  emit_move_insn (uval, copy_rtx (value->value));

  /* Check for non-power of 2.  UVAL & (UVAL - 1) clears the lowest set
     bit, so it is nonzero exactly when UVAL is not a power of 2.  */
  if (value->hdata.pow2.may_be_other)
    {
      do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, LE, 0,
			       value->mode, NULL_RTX, NULL_RTX,
			       end_of_code_label);
      tmp = expand_simple_binop (value->mode, PLUS, copy_rtx (uval),
				 constm1_rtx, NULL_RTX, 0, OPTAB_WIDEN);
      tmp = expand_simple_binop (value->mode, AND, copy_rtx (uval), tmp,
				 NULL_RTX, 0, OPTAB_WIDEN);
      do_compare_rtx_and_jump (tmp, const0_rtx, NE, 0, value->mode, NULL_RTX,
			       NULL_RTX, end_of_code_label);
    }

  /* Count log_2(value).  */
  emit_label (loop_label);

  tmp = expand_simple_binop (Pmode, PLUS, copy_rtx (mr),
			     GEN_INT (per_counter), mr, 0, OPTAB_WIDEN);
  if (tmp != mr)
    emit_move_insn (copy_rtx (mr), tmp);

  tmp = expand_simple_binop (value->mode, ASHIFTRT, copy_rtx (uval),
			     const1_rtx, uval, 0, OPTAB_WIDEN);
  if (tmp != uval)
    emit_move_insn (copy_rtx (uval), tmp);

  do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, NE, 0, value->mode,
			   NULL_RTX, NULL_RTX, loop_label);

  /* Increase the counter.  */
  emit_label (end_of_code_label);

  mem_ref = validize_mem (gen_rtx_MEM (mode, mr));

  tmp = expand_simple_binop (mode, PLUS, copy_rtx (mem_ref), const1_rtx,
			     mem_ref, 0, OPTAB_WIDEN);

  if (tmp != mem_ref)
    emit_move_insn (copy_rtx (mem_ref), tmp);

  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL for code to find the most common value.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

static rtx
gen_one_value_profiler (struct histogram_value *value, unsigned tag,
			unsigned base)
{
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx stored_value_ref, counter_ref, all_ref, stored_value, counter, all;
  rtx tmp, uval;
  rtx sequence;
  rtx same_label = gen_label_rtx ();
  rtx zero_label = gen_label_rtx ();
  rtx end_of_code_label = gen_label_rtx ();

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  stored_value_ref = coverage_counter_ref (tag, base);
  counter_ref = coverage_counter_ref (tag, base + 1);
  all_ref = coverage_counter_ref (tag, base + 2);
  stored_value = validize_mem (stored_value_ref);
  counter = validize_mem (counter_ref);
  all = validize_mem (all_ref);

  uval = gen_reg_rtx (mode);
  convert_move (uval, copy_rtx (value->value), 0);

  /* Check if the stored value matches.  */
  do_compare_rtx_and_jump (copy_rtx (uval), copy_rtx (stored_value), EQ,
			   0, mode, NULL_RTX, NULL_RTX, same_label);

  /* Does not match; check whether the counter is zero.  */
  do_compare_rtx_and_jump (copy_rtx (counter), const0_rtx, EQ, 0, mode,
			   NULL_RTX, NULL_RTX, zero_label);

  /* The counter is not zero yet.  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (counter), constm1_rtx,
			     counter, 0, OPTAB_WIDEN);

  if (tmp != counter)
    emit_move_insn (copy_rtx (counter), tmp);

  emit_jump_insn (gen_jump (end_of_code_label));
  emit_barrier ();

  emit_label (zero_label);
  /* Set new value.  */
  emit_move_insn (copy_rtx (stored_value), copy_rtx (uval));

  emit_label (same_label);
  /* Increase the counter.  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (counter), const1_rtx,
			     counter, 0, OPTAB_WIDEN);

  if (tmp != counter)
    emit_move_insn (copy_rtx (counter), tmp);

  emit_label (end_of_code_label);

  /* Increase the counter of all executions; this seems redundant given
     that we have counts for edges in the CFG, but it may happen that some
     optimization will change the counts for the block (either because
     it is unable to update them correctly, or because it will duplicate
     the block or part of it).  */
  tmp = expand_simple_binop (mode, PLUS, copy_rtx (all), const1_rtx,
			     all, 0, OPTAB_WIDEN);

  if (tmp != all)
    emit_move_insn (copy_rtx (all), tmp);
  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}
/* Output instructions as RTL for code to find the most common value of
   a difference between two evaluations of an expression.
   VALUE is the expression whose value is profiled.  TAG is the tag of the
   section for counters, BASE is offset of the counter position.  */

static rtx
gen_const_delta_profiler (struct histogram_value *value, unsigned tag,
			  unsigned base)
{
  struct histogram_value one_value_delta;
  unsigned gcov_size = tree_low_cst (TYPE_SIZE (GCOV_TYPE_NODE), 1);
  enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
  rtx stored_value_ref, stored_value, tmp, uval;
  rtx sequence;

  start_sequence ();

  if (value->seq)
    emit_insn (value->seq);

  stored_value_ref = coverage_counter_ref (tag, base);
  stored_value = validize_mem (stored_value_ref);

  uval = gen_reg_rtx (mode);
  convert_move (uval, copy_rtx (value->value), 0);
  tmp = expand_simple_binop (mode, MINUS,
			     copy_rtx (uval), copy_rtx (stored_value),
			     NULL_RTX, 0, OPTAB_WIDEN);

  one_value_delta.value = tmp;
  one_value_delta.mode = mode;
  one_value_delta.seq = NULL_RTX;
  one_value_delta.insn = value->insn;
  one_value_delta.type = HIST_TYPE_SINGLE_VALUE;
  emit_insn (gen_one_value_profiler (&one_value_delta, tag, base + 1));

  emit_move_insn (copy_rtx (stored_value), uval);
  sequence = get_insns ();
  end_sequence ();
  rebuild_jump_labels (sequence);
  return sequence;
}