gcc/profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990-2017 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Generate basic block profile instrumentation and auxiliary files.
24 Profile generation is optimized, so that not all arcs in the basic
25 block graph need instrumenting. First, the BB graph is closed with
26 one entry (function start), and one exit (function exit). Any
27 ABNORMAL_EDGE cannot be instrumented (because there is no control
28 path to place the code). We close the graph by inserting fake
29 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30 edges that do not go to the exit_block. We ignore such abnormal
31 edges. Naturally these fake edges are never directly traversed,
32 and so *cannot* be directly instrumented. Some other graph
33 	   massaging is done.  To optimize the instrumentation we generate a
34 	   minimal spanning tree of the BB graph; only edges that are not on the
35 	   spanning tree (plus the entry point) need instrumenting.  From that information
36 all other edge counts can be deduced. By construction all fake
37 edges must be on the spanning tree. We also attempt to place
38 EDGE_CRITICAL edges on the spanning tree.
40 The auxiliary files generated are <dumpbase>.gcno (at compile time)
41 and <dumpbase>.gcda (at run time). The format is
42 described in full in gcov-io.h. */
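/* An illustrative example of the scheme above (hypothetical CFG).  Consider
   the diamond

	ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   conceptually closed by a fake EXIT -> ENTRY edge.  A spanning tree could
   be { EXIT->ENTRY, ENTRY->A, A->B, A->C, B->D }, leaving only C->D and
   D->EXIT to be instrumented.  From those two counters every other edge
   count follows by flow conservation, e.g.
   count(B->D) = count(D->EXIT) - count(C->D) and
   count(ENTRY->A) = count(D->EXIT).  */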
44 /* ??? Register allocation should use basic block execution counts to
45 give preference to the most commonly executed blocks. */
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48 then we can use arc counts to help decide which arcs to instrument. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "backend.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "gimple.h"
57 #include "cfghooks.h"
58 #include "cgraph.h"
59 #include "coverage.h"
60 #include "diagnostic-core.h"
61 #include "cfganal.h"
62 #include "value-prof.h"
63 #include "gimple-iterator.h"
64 #include "tree-cfg.h"
65 #include "dumpfile.h"
66 #include "cfgloop.h"
68 #include "profile.h"
70 struct bb_profile_info {
71 	  unsigned int count_valid : 1;	/* Nonzero once the block's count is known.  */
73 /* Number of successor and predecessor edges. */
74 gcov_type succ_count;
75 gcov_type pred_count;
78 #define BB_INFO(b) ((struct bb_profile_info *) (b)->aux)
81 /* Counter summary from the last set of coverage counts read. */
83 const struct gcov_ctr_summary *profile_info;
85 /* Counter working set information computed from the current counter
86 summary. Not initialized unless profile_info summary is non-NULL. */
87 static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];
89 /* Collect statistics on the performance of this pass for the entire source
90 file. */
92 static int total_num_blocks;
93 static int total_num_edges;
94 static int total_num_edges_ignored;
95 static int total_num_edges_instrumented;
96 static int total_num_blocks_created;
97 static int total_num_passes;
98 static int total_num_times_called;
99 static int total_hist_br_prob[20];
100 static int total_num_branches;
102 /* Helper function to update gcov_working_sets. */
104 void add_working_set (gcov_working_set_t *set) {
105 int i = 0;
106 for (; i < NUM_GCOV_WORKING_SETS; i++)
107 gcov_working_sets[i] = set[i];
110 /* Forward declarations. */
111 static void find_spanning_tree (struct edge_list *);
113 	/* Add edge instrumentation code to the entire function.
115 	   EL is the edge list of the CFG; every edge that is neither ignored nor
116 	   on the spanning tree gets an edge-profiler counter.  */
118 static unsigned
119 instrument_edges (struct edge_list *el)
121 unsigned num_instr_edges = 0;
122 int num_edges = NUM_EDGES (el);
123 basic_block bb;
125 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
127 edge e;
128 edge_iterator ei;
130 FOR_EACH_EDGE (e, ei, bb->succs)
132 struct edge_profile_info *inf = EDGE_INFO (e);
134 if (!inf->ignore && !inf->on_tree)
136 gcc_assert (!(e->flags & EDGE_ABNORMAL));
137 if (dump_file)
138 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
139 e->src->index, e->dest->index,
140 EDGE_CRITICAL_P (e) ? " (and split)" : "");
141 gimple_gen_edge_profiler (num_instr_edges++, e);
146 total_num_blocks_created += num_edges;
147 if (dump_file)
148 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
149 return num_instr_edges;
152 /* Add code to measure histograms for values in list VALUES. */
153 static void
154 instrument_values (histogram_values values)
156 unsigned i;
158 /* Emit code to generate the histograms before the insns. */
160 for (i = 0; i < values.length (); i++)
162 histogram_value hist = values[i];
163 unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
165 if (!coverage_counter_alloc (t, hist->n_counters))
166 continue;
168 switch (hist->type)
170 case HIST_TYPE_INTERVAL:
171 gimple_gen_interval_profiler (hist, t, 0);
172 break;
174 case HIST_TYPE_POW2:
175 gimple_gen_pow2_profiler (hist, t, 0);
176 break;
178 case HIST_TYPE_SINGLE_VALUE:
179 gimple_gen_one_value_profiler (hist, t, 0);
180 break;
182 case HIST_TYPE_INDIR_CALL:
183 case HIST_TYPE_INDIR_CALL_TOPN:
184 gimple_gen_ic_profiler (hist, t, 0);
185 break;
187 case HIST_TYPE_AVERAGE:
188 gimple_gen_average_profiler (hist, t, 0);
189 break;
191 case HIST_TYPE_IOR:
192 gimple_gen_ior_profiler (hist, t, 0);
193 break;
195 case HIST_TYPE_TIME_PROFILE:
196 gimple_gen_time_profiler (t, 0);
197 break;
199 default:
200 gcc_unreachable ();
206 	/* Compute the working set information from the counter summary in profile_info.  */
208 void
209 get_working_sets (void)
211 unsigned ws_ix, pctinc, pct;
212 gcov_working_set_t *ws_info;
214 if (!profile_info)
215 return;
217 compute_working_sets (profile_info, gcov_working_sets);
219 if (dump_file)
221 fprintf (dump_file, "Counter working sets:\n");
222 /* Multiply the percentage by 100 to avoid float. */
223 pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
224 for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
225 ws_ix++, pct += pctinc)
227 if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
228 pct = 9990;
229 ws_info = &gcov_working_sets[ws_ix];
230 	          /* Print out the percentage using int arithmetic to avoid float.  */
231 fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
232 "%" PRId64 "\n",
233 pct / 100, pct - (pct / 100 * 100),
234 ws_info->num_counters,
235 (int64_t)ws_info->min_counter);
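/* An illustration of the fixed-point arithmetic above, with a hypothetical
   NUM_GCOV_WORKING_SETS of 128: pctinc = 100 * 100 / 128 = 78, so successive
   rows print as 0.78%, 1.56%, ... and the final row is forced to 9990,
   which prints as 99.90%.  */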
240 	/* Given the desired percentage of the full profile (sum_all from the
241 summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
242 the corresponding working set information. If an exact match for
243 the percentage isn't found, the closest value is used. */
245 gcov_working_set_t *
246 find_working_set (unsigned pct_times_10)
248 unsigned i;
249 if (!profile_info)
250 return NULL;
251 gcc_assert (pct_times_10 <= 1000);
252 if (pct_times_10 >= 999)
253 return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
254 i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
255 if (!i)
256 return &gcov_working_sets[0];
257 return &gcov_working_sets[i - 1];
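/* A worked example of the mapping above, assuming a hypothetical
   NUM_GCOV_WORKING_SETS of 128: pct_times_10 == 500 (i.e. 50%) gives
   i = 500 * 128 / 1000 = 64, so the entry at index 63 is returned, while
   pct_times_10 >= 999 always selects the last (99.9%) entry.  */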
260 	/* Read the execution counts of the instrumented edges of the current function from the gcda file.
262 CFG_CHECKSUM is the precomputed checksum for the CFG. */
264 static gcov_type *
265 get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
267 unsigned num_edges = 0;
268 basic_block bb;
269 gcov_type *counts;
271 /* Count the edges to be (possibly) instrumented. */
272 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
274 edge e;
275 edge_iterator ei;
277 FOR_EACH_EDGE (e, ei, bb->succs)
278 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
279 num_edges++;
282 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
283 lineno_checksum, &profile_info);
284 if (!counts)
285 return NULL;
287 get_working_sets ();
289 if (dump_file && profile_info)
290 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
291 profile_info->runs, (unsigned) profile_info->sum_max);
293 return counts;
297 static bool
298 is_edge_inconsistent (vec<edge, va_gc> *edges)
300 edge e;
301 edge_iterator ei;
302 FOR_EACH_EDGE (e, ei, edges)
304 if (!EDGE_INFO (e)->ignore)
306 if (e->count < 0
307 && (!(e->flags & EDGE_FAKE)
308 || !block_ends_with_call_p (e->src)))
310 if (dump_file)
312 fprintf (dump_file,
313 "Edge %i->%i is inconsistent, count%" PRId64,
314 e->src->index, e->dest->index, e->count);
315 dump_bb (dump_file, e->src, 0, TDF_DETAILS);
316 dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
318 return true;
322 return false;
325 static void
326 correct_negative_edge_counts (void)
328 basic_block bb;
329 edge e;
330 edge_iterator ei;
332 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
334 FOR_EACH_EDGE (e, ei, bb->succs)
336 if (e->count < 0)
337 e->count = 0;
342 /* Check consistency.
343 Return true if inconsistency is found. */
344 static bool
345 is_inconsistent (void)
347 basic_block bb;
348 bool inconsistent = false;
349 FOR_EACH_BB_FN (bb, cfun)
351 inconsistent |= is_edge_inconsistent (bb->preds);
352 if (!dump_file && inconsistent)
353 return true;
354 inconsistent |= is_edge_inconsistent (bb->succs);
355 if (!dump_file && inconsistent)
356 return true;
357 if (bb->count < 0)
359 if (dump_file)
361 fprintf (dump_file, "BB %i count is negative "
362 "%" PRId64,
363 bb->index,
364 bb->count);
365 dump_bb (dump_file, bb, 0, TDF_DETAILS);
367 inconsistent = true;
369 if (bb->count != sum_edge_counts (bb->preds))
371 if (dump_file)
373 fprintf (dump_file, "BB %i count does not match sum of incoming edges "
374 "%" PRId64" should be %" PRId64,
375 bb->index,
376 bb->count,
377 sum_edge_counts (bb->preds));
378 dump_bb (dump_file, bb, 0, TDF_DETAILS);
380 inconsistent = true;
382 if (bb->count != sum_edge_counts (bb->succs) &&
383 ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
384 && block_ends_with_call_p (bb)))
386 if (dump_file)
388 fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
389 "%" PRId64" should be %" PRId64,
390 bb->index,
391 bb->count,
392 sum_edge_counts (bb->succs));
393 dump_bb (dump_file, bb, 0, TDF_DETAILS);
395 inconsistent = true;
397 if (!dump_file && inconsistent)
398 return true;
401 return inconsistent;
404 /* Set each basic block count to the sum of its outgoing edge counts */
405 static void
406 set_bb_counts (void)
408 basic_block bb;
409 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
411 bb->count = sum_edge_counts (bb->succs);
412 gcc_assert (bb->count >= 0);
416 /* Reads profile data and returns total number of edge counts read */
417 static int
418 read_profile_edge_counts (gcov_type *exec_counts)
420 basic_block bb;
421 int num_edges = 0;
422 int exec_counts_pos = 0;
423 /* For each edge not on the spanning tree, set its execution count from
424 the .da file. */
425 /* The first count in the .da file is the number of times that the function
426 was entered. This is the exec_count for block zero. */
428 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
430 edge e;
431 edge_iterator ei;
433 FOR_EACH_EDGE (e, ei, bb->succs)
434 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
436 num_edges++;
437 if (exec_counts)
439 e->count = exec_counts[exec_counts_pos++];
440 if (e->count > profile_info->sum_max)
442 if (flag_profile_correction)
444 static bool informed = 0;
445 if (dump_enabled_p () && !informed)
446 dump_printf_loc (MSG_NOTE, input_location,
447 "corrupted profile info: edge count"
448 " exceeds maximal count\n");
449 informed = 1;
451 else
452 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
453 bb->index, e->dest->index);
456 else
457 e->count = 0;
459 EDGE_INFO (e)->count_valid = 1;
460 BB_INFO (bb)->succ_count--;
461 BB_INFO (e->dest)->pred_count--;
462 if (dump_file)
464 fprintf (dump_file, "\nRead edge from %i to %i, count:",
465 bb->index, e->dest->index);
466 fprintf (dump_file, "%" PRId64,
467 (int64_t) e->count);
472 return num_edges;
475 #define OVERLAP_BASE 10000
477 /* Compare the static estimated profile to the actual profile, and
478 return the "degree of overlap" measure between them.
480 	   Degree of overlap is a number between 0 and OVERLAP_BASE. It is
481 	   the sum, over all basic blocks, of the minimum of each block's relative
482 	   weight in the two profiles.  An overlap of OVERLAP_BASE means the two profiles are
483 identical. */
485 static int
486 compute_frequency_overlap (void)
488 gcov_type count_total = 0, freq_total = 0;
489 int overlap = 0;
490 basic_block bb;
492 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
494 count_total += bb->count;
495 freq_total += bb->frequency;
498 if (count_total == 0 || freq_total == 0)
499 return 0;
501 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
502 overlap += MIN (bb->count * OVERLAP_BASE / count_total,
503 bb->frequency * OVERLAP_BASE / freq_total);
505 return overlap;
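/* A small worked example of the overlap measure, with hypothetical numbers:
   two blocks with profile counts {90, 10} and static frequencies {50, 50}
   give MIN (9000, 5000) + MIN (1000, 5000) = 6000, i.e. an overlap of
   60.0% of OVERLAP_BASE.  */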
508 /* Compute the branch probabilities for the various branches.
509 Annotate them accordingly.
511 CFG_CHECKSUM is the precomputed checksum for the CFG. */
513 static void
514 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
516 basic_block bb;
517 int i;
518 int num_edges = 0;
519 int changes;
520 int passes;
521 int hist_br_prob[20];
522 int num_branches;
523 gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
524 int inconsistent = 0;
526 /* Very simple sanity checks so we catch bugs in our profiling code. */
527 if (!profile_info)
528 return;
530 if (profile_info->sum_all < profile_info->sum_max)
532 error ("corrupted profile info: sum_all is smaller than sum_max");
533 exec_counts = NULL;
536 /* Attach extra info block to each bb. */
537 alloc_aux_for_blocks (sizeof (struct bb_profile_info));
538 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
540 edge e;
541 edge_iterator ei;
543 FOR_EACH_EDGE (e, ei, bb->succs)
544 if (!EDGE_INFO (e)->ignore)
545 BB_INFO (bb)->succ_count++;
546 FOR_EACH_EDGE (e, ei, bb->preds)
547 if (!EDGE_INFO (e)->ignore)
548 BB_INFO (bb)->pred_count++;
551 /* Avoid predicting entry on exit nodes. */
552 BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
553 BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
555 num_edges = read_profile_edge_counts (exec_counts);
557 if (dump_file)
558 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
560 /* For every block in the file,
561 - if every exit/entrance edge has a known count, then set the block count
562 - if the block count is known, and every exit/entrance edge but one has
563 a known execution count, then set the count of the remaining edge
565 As edge counts are set, decrement the succ/pred count, but don't delete
566 	   the edge; that way we can easily tell when all edges are known, or when only
567 	   one edge is unknown.  */
569 /* The order that the basic blocks are iterated through is important.
570 Since the code that finds spanning trees starts with block 0, low numbered
571 edges are put on the spanning tree in preference to high numbered edges.
572 Hence, most instrumented edges are at the end. Graph solving works much
573 faster if we propagate numbers from the end to the start.
575 This takes an average of slightly more than 3 passes. */
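/* A hypothetical example of a single propagation step: if a block's count
   is known to be 10 and all but one of its outgoing edges are known and
   sum to 7, the remaining edge must have count 10 - 7 = 3; setting it may
   in turn make its destination block solvable on a later iteration.  */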
577 changes = 1;
578 passes = 0;
579 while (changes)
581 passes++;
582 changes = 0;
583 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
585 struct bb_profile_info *bi = BB_INFO (bb);
586 if (! bi->count_valid)
588 if (bi->succ_count == 0)
590 edge e;
591 edge_iterator ei;
592 gcov_type total = 0;
594 FOR_EACH_EDGE (e, ei, bb->succs)
595 total += e->count;
596 bb->count = total;
597 bi->count_valid = 1;
598 changes = 1;
600 else if (bi->pred_count == 0)
602 edge e;
603 edge_iterator ei;
604 gcov_type total = 0;
606 FOR_EACH_EDGE (e, ei, bb->preds)
607 total += e->count;
608 bb->count = total;
609 bi->count_valid = 1;
610 changes = 1;
613 if (bi->count_valid)
615 if (bi->succ_count == 1)
617 edge e;
618 edge_iterator ei;
619 gcov_type total = 0;
621 /* One of the counts will be invalid, but it is zero,
622 so adding it in also doesn't hurt. */
623 FOR_EACH_EDGE (e, ei, bb->succs)
624 total += e->count;
626 /* Search for the invalid edge, and set its count. */
627 FOR_EACH_EDGE (e, ei, bb->succs)
628 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
629 break;
631 /* Calculate count for remaining edge by conservation. */
632 total = bb->count - total;
634 gcc_assert (e);
635 EDGE_INFO (e)->count_valid = 1;
636 e->count = total;
637 bi->succ_count--;
639 BB_INFO (e->dest)->pred_count--;
640 changes = 1;
642 if (bi->pred_count == 1)
644 edge e;
645 edge_iterator ei;
646 gcov_type total = 0;
648 /* One of the counts will be invalid, but it is zero,
649 so adding it in also doesn't hurt. */
650 FOR_EACH_EDGE (e, ei, bb->preds)
651 total += e->count;
653 /* Search for the invalid edge, and set its count. */
654 FOR_EACH_EDGE (e, ei, bb->preds)
655 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
656 break;
658 /* Calculate count for remaining edge by conservation. */
659 total = bb->count - total + e->count;
661 gcc_assert (e);
662 EDGE_INFO (e)->count_valid = 1;
663 e->count = total;
664 bi->pred_count--;
666 BB_INFO (e->src)->succ_count--;
667 changes = 1;
672 if (dump_file)
674 int overlap = compute_frequency_overlap ();
675 gimple_dump_cfg (dump_file, dump_flags);
676 fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
677 overlap / (OVERLAP_BASE / 100),
678 overlap % (OVERLAP_BASE / 100));
681 total_num_passes += passes;
682 if (dump_file)
683 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
685 /* If the graph has been correctly solved, every block will have a
686 succ and pred count of zero. */
687 FOR_EACH_BB_FN (bb, cfun)
689 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
692 /* Check for inconsistent basic block counts */
693 inconsistent = is_inconsistent ();
695 if (inconsistent)
697 if (flag_profile_correction)
699 /* Inconsistency detected. Make it flow-consistent. */
700 static int informed = 0;
701 if (dump_enabled_p () && informed == 0)
703 informed = 1;
704 dump_printf_loc (MSG_NOTE, input_location,
705 "correcting inconsistent profile data\n");
707 correct_negative_edge_counts ();
708 /* Set bb counts to the sum of the outgoing edge counts */
709 set_bb_counts ();
710 if (dump_file)
711 fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
712 mcf_smooth_cfg ();
714 else
715 error ("corrupted profile info: profile data is not flow-consistent");
718 	  /* For every edge, calculate its branch probability, record it on the
719 	     edge, and gather a histogram of the probabilities for the dump file.  */
721 for (i = 0; i < 20; i++)
722 hist_br_prob[i] = 0;
723 num_branches = 0;
725 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
727 edge e;
728 edge_iterator ei;
730 if (bb->count < 0)
732 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
733 bb->index, (int)bb->count);
734 bb->count = 0;
736 FOR_EACH_EDGE (e, ei, bb->succs)
738 	          /* A function may return twice in the case the called function is
739 	             setjmp or calls fork, but we can't represent this by an extra
740 	             edge from the entry, since an extra edge from the exit is
741 	             already present.  We get negative frequency from the entry
742 	             point.  */
743 if ((e->count < 0
744 && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
745 || (e->count > bb->count
746 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
748 if (block_ends_with_call_p (bb))
749 e->count = e->count < 0 ? 0 : bb->count;
751 if (e->count < 0 || e->count > bb->count)
753 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
754 e->src->index, e->dest->index,
755 (int)e->count);
756 e->count = bb->count / 2;
759 if (bb->count)
761 FOR_EACH_EDGE (e, ei, bb->succs)
762 e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
763 if (bb->index >= NUM_FIXED_BLOCKS
764 && block_ends_with_condjump_p (bb)
765 && EDGE_COUNT (bb->succs) >= 2)
767 int prob;
768 edge e;
769 int index;
771 /* Find the branch edge. It is possible that we do have fake
772 edges here. */
773 FOR_EACH_EDGE (e, ei, bb->succs)
774 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
775 break;
777 prob = e->probability;
778 index = prob * 20 / REG_BR_PROB_BASE;
780 if (index == 20)
781 index = 19;
782 hist_br_prob[index]++;
784 num_branches++;
787 /* As a last resort, distribute the probabilities evenly.
788 	         Use the simple heuristic that if there are normal edges,
789 	         all abnormal edges get a frequency of 0; otherwise distribute the
790 	         frequency over the abnormal edges (this is the case of noreturn
791 	         calls).  */
792 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
794 int total = 0;
796 FOR_EACH_EDGE (e, ei, bb->succs)
797 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
798 total ++;
799 if (total)
801 FOR_EACH_EDGE (e, ei, bb->succs)
802 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
803 e->probability = REG_BR_PROB_BASE / total;
804 else
805 e->probability = 0;
807 else
809 total += EDGE_COUNT (bb->succs);
810 FOR_EACH_EDGE (e, ei, bb->succs)
811 e->probability = REG_BR_PROB_BASE / total;
813 if (bb->index >= NUM_FIXED_BLOCKS
814 && block_ends_with_condjump_p (bb)
815 && EDGE_COUNT (bb->succs) >= 2)
816 num_branches++;
819 counts_to_freqs ();
821 if (dump_file)
823 fprintf (dump_file, "%d branches\n", num_branches);
824 if (num_branches)
825 for (i = 0; i < 10; i++)
826 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
827 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
828 5 * i, 5 * i + 5);
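/* Note on the figures printed above: bucket I of hist_br_prob covers
   probabilities [5*I%, 5*I+5%), so pairing bucket I with bucket 19-I
   reports branches taken with a probability in that band in either
   direction; e.g. the "0-5%" row counts branches that are almost never
   or almost always taken.  */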
830 total_num_branches += num_branches;
831 for (i = 0; i < 20; i++)
832 total_hist_br_prob[i] += hist_br_prob[i];
834 fputc ('\n', dump_file);
835 fputc ('\n', dump_file);
838 free_aux_for_blocks ();
841 	/* Load the value histograms whose descriptions are stored in the VALUES array
842 	   from the .gcda file.
844 CFG_CHECKSUM is the precomputed checksum for the CFG. */
846 static void
847 compute_value_histograms (histogram_values values, unsigned cfg_checksum,
848 unsigned lineno_checksum)
850 unsigned i, j, t, any;
851 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
852 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
853 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
854 gcov_type *aact_count;
855 struct cgraph_node *node;
857 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
858 n_histogram_counters[t] = 0;
860 for (i = 0; i < values.length (); i++)
862 histogram_value hist = values[i];
863 n_histogram_counters[(int) hist->type] += hist->n_counters;
866 any = 0;
867 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
869 if (!n_histogram_counters[t])
871 histogram_counts[t] = NULL;
872 continue;
875 histogram_counts[t] =
876 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
877 n_histogram_counters[t], cfg_checksum,
878 lineno_checksum, NULL);
879 if (histogram_counts[t])
880 any = 1;
881 act_count[t] = histogram_counts[t];
883 if (!any)
884 return;
886 for (i = 0; i < values.length (); i++)
888 histogram_value hist = values[i];
889 gimple *stmt = hist->hvalue.stmt;
891 t = (int) hist->type;
893 aact_count = act_count[t];
895 if (act_count[t])
896 act_count[t] += hist->n_counters;
898 gimple_add_histogram_value (cfun, stmt, hist);
899 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
900 for (j = 0; j < hist->n_counters; j++)
901 if (aact_count)
902 hist->hvalue.counters[j] = aact_count[j];
903 else
904 hist->hvalue.counters[j] = 0;
906 	      /* The time profiler counter is not related to any statement,
907 	         so we have to read the counter and set the value on
908 the corresponding call graph node. */
909 if (hist->type == HIST_TYPE_TIME_PROFILE)
911 node = cgraph_node::get (hist->fun->decl);
912 node->tp_first_run = hist->hvalue.counters[0];
914 if (dump_file)
915 fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
919 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
920 free (histogram_counts[t]);
923 	/* When passed NULL as FILE_NAME, initialize.
924 	   Otherwise, output the records needed to switch the current file name to
925 	   FILE_NAME and the current line to LINE, opening a GCOV_TAG_LINES record for BB at *OFFSET if one is not already open.  */
926 static void
927 output_location (char const *file_name, int line,
928 gcov_position_t *offset, basic_block bb)
930 static char const *prev_file_name;
931 static int prev_line;
932 bool name_differs, line_differs;
934 if (!file_name)
936 prev_file_name = NULL;
937 prev_line = -1;
938 return;
941 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
942 line_differs = prev_line != line;
944 if (name_differs || line_differs)
946 if (!*offset)
948 *offset = gcov_write_tag (GCOV_TAG_LINES);
949 gcov_write_unsigned (bb->index);
950 	          name_differs = line_differs = true;
953 /* If this is a new source file, then output the
954 	         file's name to the .gcno file.  */
955 if (name_differs)
957 prev_file_name = file_name;
958 gcov_write_unsigned (0);
959 gcov_write_string (prev_file_name);
961 if (line_differs)
963 gcov_write_unsigned (line);
964 prev_line = line;
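/* A sketch of the GCOV_TAG_LINES record that the function above helps
   produce for a block (inferred from the calls made here and in
   branch_prob; see gcov-io.h for the authoritative format):

	tag, length, block index,
	[0, "file name"]   -- written only when the file changes
	[line number]      -- written only when the line changes
	...
	0, empty string     -- terminator written by the caller

   The record length is back-patched by gcov_write_length once the caller
   has emitted all locations for the block.  */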
969 	/* Instrument and/or analyze program behavior based on the program's CFG.
971 This function creates a representation of the control flow graph (of
972 the function being compiled) that is suitable for the instrumentation
973 of edges and/or converting measured edge counts to counts on the
974 complete CFG.
976 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
977 the flow graph that are needed to reconstruct the dynamic behavior of the
978 flow graph. This data is written to the gcno file for gcov.
980 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
981 information from the gcda file containing edge count information from
982 previous executions of the function being compiled. In this case, the
983 control flow graph is annotated with actual execution counts by
984 compute_branch_probabilities().
986 Main entry point of this file. */
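/* In outline, branch_prob proceeds as follows: add fake edges so that
   abnormal control flow does not create cycles, build the edge list and
   spanning tree, write the block/arc/line records to the gcno file, and
   then, depending on the flags, read the gcda counts back and compute
   probabilities and/or insert the edge and value counters.  */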
988 void
989 branch_prob (void)
991 basic_block bb;
992 unsigned i;
993 unsigned num_edges, ignored_edges;
994 unsigned num_instrumented;
995 struct edge_list *el;
996 histogram_values values = histogram_values ();
997 unsigned cfg_checksum, lineno_checksum;
999 total_num_times_called++;
1001 flow_call_edges_add (NULL);
1002 add_noreturn_fake_exit_edges ();
1004 	  /* We can't handle cyclic regions constructed using abnormal edges.
1005 	     To avoid these we add a fake edge to the exit from every source of an
1006 	     abnormal edge, and a fake edge from the entry to every destination.
1007 	     This keeps the graph acyclic and our calculation exact for all normal
1008 edges except for exit and entrance ones.
1010 We also add fake exit edges for each call and asm statement in the
1011 	     basic block, since it may not return.  */
1013 FOR_EACH_BB_FN (bb, cfun)
1015 int need_exit_edge = 0, need_entry_edge = 0;
1016 int have_exit_edge = 0, have_entry_edge = 0;
1017 edge e;
1018 edge_iterator ei;
1020 /* Functions returning multiple times are not handled by extra edges.
1021 	         Instead we simply allow negative counts on the edges from the exit to the
1022 	         block past the call, and corresponding probabilities.  We can't go
1023 	         with the extra edges because that would result in a flowgraph that
1024 	         needs to have fake edges outside the spanning tree.  */
1026 FOR_EACH_EDGE (e, ei, bb->succs)
1028 gimple_stmt_iterator gsi;
1029 gimple *last = NULL;
1031 /* It may happen that there are compiler generated statements
1032 without a locus at all. Go through the basic block from the
1033 last to the first statement looking for a locus. */
1034 for (gsi = gsi_last_nondebug_bb (bb);
1035 !gsi_end_p (gsi);
1036 gsi_prev_nondebug (&gsi))
1038 last = gsi_stmt (gsi);
1039 if (!RESERVED_LOCATION_P (gimple_location (last)))
1040 break;
1043 	          /* An edge with a goto locus might get wrong coverage info unless
1044 	             it is the only edge out of BB; split such edges.
1045 	             Don't split when the locations match, so that
1046 	             if (blah) goto something;
1047 	             is not counted twice.  */
1048 if (last
1049 && gimple_has_location (last)
1050 && !RESERVED_LOCATION_P (e->goto_locus)
1051 && !single_succ_p (bb)
1052 && (LOCATION_FILE (e->goto_locus)
1053 != LOCATION_FILE (gimple_location (last))
1054 || (LOCATION_LINE (e->goto_locus)
1055 != LOCATION_LINE (gimple_location (last)))))
1057 basic_block new_bb = split_edge (e);
1058 edge ne = single_succ_edge (new_bb);
1059 ne->goto_locus = e->goto_locus;
1061 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1062 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1063 need_exit_edge = 1;
1064 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1065 have_exit_edge = 1;
1067 FOR_EACH_EDGE (e, ei, bb->preds)
1069 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1070 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1071 need_entry_edge = 1;
1072 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1073 have_entry_edge = 1;
1076 if (need_exit_edge && !have_exit_edge)
1078 if (dump_file)
1079 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1080 bb->index);
1081 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1083 if (need_entry_edge && !have_entry_edge)
1085 if (dump_file)
1086 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1087 bb->index);
1088 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1089 /* Avoid bbs that have both fake entry edge and also some
1090 exit edge. One of those edges wouldn't be added to the
1091 spanning tree, but we can't instrument any of them. */
1092 if (have_exit_edge || need_exit_edge)
1094 gimple_stmt_iterator gsi;
1095 gimple *first;
1097 gsi = gsi_start_nondebug_after_labels_bb (bb);
1098 gcc_checking_assert (!gsi_end_p (gsi));
1099 first = gsi_stmt (gsi);
1100 /* Don't split the bbs containing __builtin_setjmp_receiver
1101 or ABNORMAL_DISPATCHER calls. These are very
1102 special and don't expect anything to be inserted before
1103 them. */
1104 if (is_gimple_call (first)
1105 && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1106 || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1107 || (gimple_call_internal_p (first)
1108 && (gimple_call_internal_fn (first)
1109 == IFN_ABNORMAL_DISPATCHER))))
1110 continue;
1112 if (dump_file)
1113 fprintf (dump_file, "Splitting bb %i after labels\n",
1114 bb->index);
1115 split_block_after_labels (bb);
1120 el = create_edge_list ();
1121 num_edges = NUM_EDGES (el);
1122 alloc_aux_for_edges (sizeof (struct edge_profile_info));
1124 /* The basic blocks are expected to be numbered sequentially. */
1125 compact_blocks ();
1127 ignored_edges = 0;
1128 for (i = 0 ; i < num_edges ; i++)
1130 edge e = INDEX_EDGE (el, i);
1131 e->count = 0;
1133 /* Mark edges we've replaced by fake edges above as ignored. */
1134 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1135 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1136 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1138 EDGE_INFO (e)->ignore = 1;
1139 ignored_edges++;
1143 	  /* Create a spanning tree from the basic block graph, marking each edge that is
1144 	     on the spanning tree.  We insert as many abnormal and critical edges
1145 	     as possible to minimize the number of edge splits necessary.  */
1147 find_spanning_tree (el);
1149 /* Fake edges that are not on the tree will not be instrumented, so
1150 mark them ignored. */
1151 for (num_instrumented = i = 0; i < num_edges; i++)
1153 edge e = INDEX_EDGE (el, i);
1154 struct edge_profile_info *inf = EDGE_INFO (e);
1156 if (inf->ignore || inf->on_tree)
1157 /*NOP*/;
1158 else if (e->flags & EDGE_FAKE)
1160 inf->ignore = 1;
1161 ignored_edges++;
1163 else
1164 num_instrumented++;
1167 total_num_blocks += n_basic_blocks_for_fn (cfun);
1168 if (dump_file)
1169 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1171 total_num_edges += num_edges;
1172 if (dump_file)
1173 fprintf (dump_file, "%d edges\n", num_edges);
1175 total_num_edges_ignored += ignored_edges;
1176 if (dump_file)
1177 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1179 total_num_edges_instrumented += num_instrumented;
1180 if (dump_file)
1181 fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1183 /* Compute two different checksums. Note that we want to compute
1184 	     the checksum in only one place, since it depends on the shape
1185 	     of the control flow graph, which can change during
1186 various transformations. */
1187 cfg_checksum = coverage_compute_cfg_checksum (cfun);
1188 lineno_checksum = coverage_compute_lineno_checksum ();
1190 /* Write the data from which gcov can reconstruct the basic block
1191 graph and function line numbers (the gcno file). */
1192 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1194 gcov_position_t offset;
1196 /* Basic block flags */
1197 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1198 for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1199 gcov_write_unsigned (0);
1200 gcov_write_length (offset);
1202 /* Arcs */
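/* The loop below emits one GCOV_TAG_ARCS record per block: the source
   block index followed by a (destination index, flag bits) pair for each
   non-ignored outgoing edge, with the record length back-patched by
   gcov_write_length.  See gcov-io.h for the authoritative layout.  */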
1203 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1204 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1206 edge e;
1207 edge_iterator ei;
1209 offset = gcov_write_tag (GCOV_TAG_ARCS);
1210 gcov_write_unsigned (bb->index);
1212 FOR_EACH_EDGE (e, ei, bb->succs)
1214 struct edge_profile_info *i = EDGE_INFO (e);
1215 if (!i->ignore)
1217 unsigned flag_bits = 0;
1219 if (i->on_tree)
1220 flag_bits |= GCOV_ARC_ON_TREE;
1221 if (e->flags & EDGE_FAKE)
1222 flag_bits |= GCOV_ARC_FAKE;
1223 if (e->flags & EDGE_FALLTHRU)
1224 flag_bits |= GCOV_ARC_FALLTHROUGH;
1225 /* On trees we don't have fallthru flags, but we can
1226 recompute them from CFG shape. */
1227 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1228 && e->src->next_bb == e->dest)
1229 flag_bits |= GCOV_ARC_FALLTHROUGH;
1231 gcov_write_unsigned (e->dest->index);
1232 gcov_write_unsigned (flag_bits);
1236 gcov_write_length (offset);
1239 /* Line numbers. */
1240 /* Initialize the output. */
1241 output_location (NULL, 0, NULL, NULL);
1243 FOR_EACH_BB_FN (bb, cfun)
1245 gimple_stmt_iterator gsi;
1246 gcov_position_t offset = 0;
1248 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1250 expanded_location curr_location =
1251 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1252 output_location (curr_location.file, curr_location.line,
1253 &offset, bb);
1256 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1258 gimple *stmt = gsi_stmt (gsi);
1259 if (!RESERVED_LOCATION_P (gimple_location (stmt)))
1260 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1261 &offset, bb);
1264 /* Notice GOTO expressions eliminated while constructing the CFG. */
1265 if (single_succ_p (bb)
1266 && !RESERVED_LOCATION_P (single_succ_edge (bb)->goto_locus))
1268 expanded_location curr_location
1269 = expand_location (single_succ_edge (bb)->goto_locus);
1270 output_location (curr_location.file, curr_location.line,
1271 &offset, bb);
1274 if (offset)
1276 /* A file of NULL indicates the end of run. */
1277 gcov_write_unsigned (0);
1278 gcov_write_string (NULL);
1279 gcov_write_length (offset);
1284 if (flag_profile_values)
1285 gimple_find_values_to_profile (&values);
1287 if (flag_branch_probabilities)
1289 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1290 if (flag_profile_values)
1291 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1294 remove_fake_edges ();
1296 /* For each edge not on the spanning tree, add counting code. */
1297 if (profile_arc_flag
1298 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1300 unsigned n_instrumented;
1302 gimple_init_gcov_profiler ();
1304 n_instrumented = instrument_edges (el);
1306 gcc_assert (n_instrumented == num_instrumented);
1308 if (flag_profile_values)
1309 instrument_values (values);
1311 /* Commit changes done by instrumentation. */
1312 gsi_commit_edge_inserts ();
1315 free_aux_for_edges ();
1317 values.release ();
1318 free_edge_list (el);
1319 coverage_end_function (lineno_checksum, cfg_checksum);
1320 if (flag_branch_probabilities && profile_info)
1322 struct loop *loop;
1323 if (dump_file && (dump_flags & TDF_DETAILS))
1324 report_predictor_hitrates ();
1325 profile_status_for_fn (cfun) = PROFILE_READ;
1327 /* At this moment we have precise loop iteration count estimates.
1328 	         Record them in the loop structures before the profile gets out of date.  */
1329 FOR_EACH_LOOP (loop, 0)
1330 if (loop->header->count)
1332 gcov_type nit = expected_loop_iterations_unbounded (loop);
1333 widest_int bound = gcov_type_to_wide_int (nit);
1334 loop->any_estimate = false;
1335 record_niter_bound (loop, bound, true, false);
1337 compute_function_frequency ();
1341 /* Union find algorithm implementation for the basic blocks using
1342 aux fields. */
1344 static basic_block
1345 find_group (basic_block bb)
1347 basic_block group = bb, bb1;
1349 while ((basic_block) group->aux != group)
1350 group = (basic_block) group->aux;
1352 /* Compress path. */
1353 while ((basic_block) bb->aux != group)
1355 bb1 = (basic_block) bb->aux;
1356 bb->aux = (void *) group;
1357 bb = bb1;
1359 return group;
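/* A small illustration of find_group's path compression: with an aux chain
   A -> B -> C -> D (D being its own representative), find_group (A)
   returns D and leaves A, B and C all pointing directly at D, so later
   lookups terminate after a single step.  */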
1362 static void
1363 union_groups (basic_block bb1, basic_block bb2)
1365 basic_block bb1g = find_group (bb1);
1366 basic_block bb2g = find_group (bb2);
1368 	  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
1369 	     this code is unlikely to be a performance problem anyway.  */
1370 gcc_assert (bb1g != bb2g);
1372 bb1g->aux = bb2g;
1375 /* This function searches all of the edges in the program flow graph, and puts
1376 as many bad edges as possible onto the spanning tree. Bad edges include
1377 	   abnormal edges, which can't be instrumented at the moment.  Since it is
1378 possible for fake edges to form a cycle, we will have to develop some
1379 	   better way in the future.  Also put critical edges on the tree, since they
1380 are more expensive to instrument. */
1382 static void
1383 find_spanning_tree (struct edge_list *el)
1385 int i;
1386 int num_edges = NUM_EDGES (el);
1387 basic_block bb;
1389 /* We use aux field for standard union-find algorithm. */
1390 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1391 bb->aux = bb;
1393 	  /* Add the fake exit-to-entry edge, which we can't instrument, to the tree.  */
1394 union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1396 /* First add all abnormal edges to the tree unless they form a cycle. Also
1397 	     add all edges to the exit block, to avoid inserting profiling code after
1398 	     the setting of the function's return value.  */
1399 for (i = 0; i < num_edges; i++)
1401 edge e = INDEX_EDGE (el, i);
1402 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1403 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1404 && !EDGE_INFO (e)->ignore
1405 && (find_group (e->src) != find_group (e->dest)))
1407 if (dump_file)
1408 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1409 e->src->index, e->dest->index);
1410 EDGE_INFO (e)->on_tree = 1;
1411 union_groups (e->src, e->dest);
1415 /* Now insert all critical edges to the tree unless they form a cycle. */
1416 for (i = 0; i < num_edges; i++)
1418 edge e = INDEX_EDGE (el, i);
1419 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1420 && find_group (e->src) != find_group (e->dest))
1422 if (dump_file)
1423 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1424 e->src->index, e->dest->index);
1425 EDGE_INFO (e)->on_tree = 1;
1426 union_groups (e->src, e->dest);
1430 /* And now the rest. */
1431 for (i = 0; i < num_edges; i++)
1433 edge e = INDEX_EDGE (el, i);
1434 if (!EDGE_INFO (e)->ignore
1435 && find_group (e->src) != find_group (e->dest))
1437 if (dump_file)
1438 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1439 e->src->index, e->dest->index);
1440 EDGE_INFO (e)->on_tree = 1;
1441 union_groups (e->src, e->dest);
1445 clear_aux_for_blocks ();
1448 /* Perform file-level initialization for branch-prob processing. */
1450 void
1451 init_branch_prob (void)
1453 int i;
1455 total_num_blocks = 0;
1456 total_num_edges = 0;
1457 total_num_edges_ignored = 0;
1458 total_num_edges_instrumented = 0;
1459 total_num_blocks_created = 0;
1460 total_num_passes = 0;
1461 total_num_times_called = 0;
1462 total_num_branches = 0;
1463 for (i = 0; i < 20; i++)
1464 total_hist_br_prob[i] = 0;
1467 /* Performs file-level cleanup after branch-prob processing
1468 is completed. */
1470 void
1471 end_branch_prob (void)
1473 if (dump_file)
1475 fprintf (dump_file, "\n");
1476 fprintf (dump_file, "Total number of blocks: %d\n",
1477 total_num_blocks);
1478 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1479 fprintf (dump_file, "Total number of ignored edges: %d\n",
1480 total_num_edges_ignored);
1481 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1482 total_num_edges_instrumented);
1483 fprintf (dump_file, "Total number of blocks created: %d\n",
1484 total_num_blocks_created);
1485 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1486 total_num_passes);
1487 if (total_num_times_called != 0)
1488 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1489 (total_num_passes + (total_num_times_called >> 1))
1490 / total_num_times_called);
1491 fprintf (dump_file, "Total number of branches: %d\n",
1492 total_num_branches);
1493 if (total_num_branches)
1495 int i;
1497 for (i = 0; i < 10; i++)
1498 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1499 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1500 / total_num_branches, 5*i, 5*i+5);