gcc/value-prof.c
1 /* Transformations based on profile information for values.
2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "coverage.h"
31 #include "data-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "tree-nested.h"
35 #include "calls.h"
36 #include "expr.h"
37 #include "value-prof.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "gimple-pretty-print.h"
43 #include "dumpfile.h"
44 #include "builtins.h"
45 #include "params.h"
46 #include "tree-chkp.h"
48 /* In this file value profile based optimizations are placed. Currently the
49 following optimizations are implemented (for more detailed descriptions
50 see comments at value_profile_transformations):
52 1) Division/modulo specialization. Provided that we can determine that the
53 operands of the division have some special properties, we may use it to
54 produce more effective code.
56      2) Indirect/virtual call specialization. If we can determine the most
57      common callee of an indirect/virtual call, we can use this
58      information to improve code effectiveness (it is especially useful
59      for the inliner).
61 3) Speculative prefetching. If we are able to determine that the difference
62 between addresses accessed by a memory reference is usually constant, we
63 may add the prefetch instructions.
64 FIXME: This transformation was removed together with RTL based value
65 profiling.
68 Value profiling internals
69 ==========================
71 Every value profiling transformation starts with defining what values
72 to profile. There are different histogram types (see HIST_TYPE_* in
73 value-prof.h) and each transformation can request one or more histogram
74 types per GIMPLE statement. The function gimple_find_values_to_profile()
75 collects the values to profile in a vec, and adds the number of counters
76 required for the different histogram types.
78 For a -fprofile-generate run, the statements for which values should be
79    recorded are instrumented in instrument_values (). The instrumentation
80 is done by helper functions that can be found in tree-profile.c, where
81 new types of histograms can be added if necessary.
83 After a -fprofile-use, the value profiling data is read back in by
84 compute_value_histograms() that translates the collected data to
85 histograms and attaches them to the profiled statements via
86 gimple_add_histogram_value(). Histograms are stored in a hash table
87    that is attached to every instrumented function, see VALUE_HISTOGRAMS
88 in function.h.
90 The value-profile transformations driver is the function
91 gimple_value_profile_transformations(). It traverses all statements in
92 the to-be-transformed function, and looks for statements with one or
93 more histograms attached to it. If a statement has histograms, the
94 transformation functions are called on the statement.
96 Limitations / FIXME / TODO:
97 * Only one histogram of each type can be associated with a statement.
98 * Currently, HIST_TYPE_CONST_DELTA is not implemented.
99 (This type of histogram was originally used to implement a form of
100 stride profiling based speculative prefetching to improve SPEC2000
101 scores for memory-bound benchmarks, mcf and equake. However, this
102 was an RTL value-profiling transformation, and those have all been
103 removed.)
104 * Some value profile transformations are done in builtins.c (?!)
105 * Updating of histograms needs some TLC.
106 * The value profiling code could be used to record analysis results
107      from non-profiling passes (e.g. VRP).
108 * Adding new profilers should be simplified, starting with a cleanup
109      of what-happens-where and with making gimple_find_values_to_profile
110 and gimple_value_profile_transformations table-driven, perhaps...
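/* As a purely illustrative example (hypothetical source form, not what the
   compiler literally emits), transformation 1) conceptually turns

       r = a / b;

   where the profile says that B is almost always the constant N, into

       if (b == N)
         r = a / N;      <-- division by a known constant, cheaper
       else
         r = a / b;      <-- original statement kept as the fallback

   The other transformations below follow the same pattern of a guarded
   fast path plus the original statement as the slow path.  */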
113 static tree gimple_divmod_fixed_value (gassign *, tree, int, gcov_type,
114 gcov_type);
115 static tree gimple_mod_pow2 (gassign *, int, gcov_type, gcov_type);
116 static tree gimple_mod_subtract (gassign *, int, int, int, gcov_type,
117 gcov_type, gcov_type);
118 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
119 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
120 static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
121 static bool gimple_stringops_transform (gimple_stmt_iterator *);
122 static bool gimple_ic_transform (gimple_stmt_iterator *);
124 /* Allocate histogram value. */
126 histogram_value
127 gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
128 enum hist_type type, gimple *stmt, tree value)
130 histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
131 hist->hvalue.value = value;
132 hist->hvalue.stmt = stmt;
133 hist->type = type;
134 return hist;
137 /* Hash value for histogram. */
139 static hashval_t
140 histogram_hash (const void *x)
142 return htab_hash_pointer (((const_histogram_value)x)->hvalue.stmt);
145 /* Return nonzero if statement for histogram_value X is Y. */
147 static int
148 histogram_eq (const void *x, const void *y)
150 return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
153 /* Set histogram for STMT. */
155 static void
156 set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
158 void **loc;
159 if (!hist && !VALUE_HISTOGRAMS (fun))
160 return;
161 if (!VALUE_HISTOGRAMS (fun))
162 VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
163 histogram_eq, NULL);
164 loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
165 htab_hash_pointer (stmt),
166 hist ? INSERT : NO_INSERT);
167 if (!hist)
169 if (loc)
170 htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
171 return;
173 *loc = hist;
176 /* Get histogram list for STMT. */
178 histogram_value
179 gimple_histogram_value (struct function *fun, gimple *stmt)
181 if (!VALUE_HISTOGRAMS (fun))
182 return NULL;
183 return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
184 htab_hash_pointer (stmt));
187 /* Add histogram for STMT. */
189 void
190 gimple_add_histogram_value (struct function *fun, gimple *stmt,
191 histogram_value hist)
193 hist->hvalue.next = gimple_histogram_value (fun, stmt);
194 set_histogram_value (fun, stmt, hist);
195 hist->fun = fun;
198 /* Remove histogram HIST from STMT's histogram list. */
200 void
201 gimple_remove_histogram_value (struct function *fun, gimple *stmt,
202 histogram_value hist)
204 histogram_value hist2 = gimple_histogram_value (fun, stmt);
205 if (hist == hist2)
207 set_histogram_value (fun, stmt, hist->hvalue.next);
209 else
211 while (hist2->hvalue.next != hist)
212 hist2 = hist2->hvalue.next;
213 hist2->hvalue.next = hist->hvalue.next;
215 free (hist->hvalue.counters);
216 if (flag_checking)
217 memset (hist, 0xab, sizeof (*hist));
218 free (hist);
221 /* Lookup histogram of type TYPE in the STMT. */
223 histogram_value
224 gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
225 enum hist_type type)
227 histogram_value hist;
228 for (hist = gimple_histogram_value (fun, stmt); hist;
229 hist = hist->hvalue.next)
230 if (hist->type == type)
231 return hist;
232 return NULL;
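/* A minimal usage sketch (illustrative, assuming STMT already has a
   HIST_TYPE_SINGLE_VALUE histogram attached, e.g. by
   compute_value_histograms):

       histogram_value h
         = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
       if (h)
         {
           gcov_type val = h->hvalue.counters[0];     <-- most common value
           gcov_type count = h->hvalue.counters[1];   <-- its match count
           gcov_type all = h->hvalue.counters[2];     <-- total executions
           ...
         }

   This is the same pattern the transformations further down in this file
   follow.  */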
235 /* Dump information about HIST to DUMP_FILE. */
237 static void
238 dump_histogram_value (FILE *dump_file, histogram_value hist)
240 switch (hist->type)
242 case HIST_TYPE_INTERVAL:
243 fprintf (dump_file, "Interval counter range %d -- %d",
244 hist->hdata.intvl.int_start,
245 (hist->hdata.intvl.int_start
246 + hist->hdata.intvl.steps - 1));
247 if (hist->hvalue.counters)
249 unsigned int i;
250 fprintf (dump_file, " [");
251 for (i = 0; i < hist->hdata.intvl.steps; i++)
252 fprintf (dump_file, " %d:%" PRId64,
253 hist->hdata.intvl.int_start + i,
254 (int64_t) hist->hvalue.counters[i]);
255 fprintf (dump_file, " ] outside range:%" PRId64,
256 (int64_t) hist->hvalue.counters[i]);
258 fprintf (dump_file, ".\n");
259 break;
261 case HIST_TYPE_POW2:
262 fprintf (dump_file, "Pow2 counter ");
263 if (hist->hvalue.counters)
265 fprintf (dump_file, "pow2:%" PRId64
266 " nonpow2:%" PRId64,
267 (int64_t) hist->hvalue.counters[0],
268 (int64_t) hist->hvalue.counters[1]);
270 fprintf (dump_file, ".\n");
271 break;
273 case HIST_TYPE_SINGLE_VALUE:
274 fprintf (dump_file, "Single value ");
275 if (hist->hvalue.counters)
277 fprintf (dump_file, "value:%" PRId64
278 " match:%" PRId64
279 " wrong:%" PRId64,
280 (int64_t) hist->hvalue.counters[0],
281 (int64_t) hist->hvalue.counters[1],
282 (int64_t) hist->hvalue.counters[2]);
284 fprintf (dump_file, ".\n");
285 break;
287 case HIST_TYPE_AVERAGE:
288 fprintf (dump_file, "Average value ");
289 if (hist->hvalue.counters)
291 fprintf (dump_file, "sum:%" PRId64
292 " times:%" PRId64,
293 (int64_t) hist->hvalue.counters[0],
294 (int64_t) hist->hvalue.counters[1]);
296 fprintf (dump_file, ".\n");
297 break;
299 case HIST_TYPE_IOR:
300 fprintf (dump_file, "IOR value ");
301 if (hist->hvalue.counters)
303 fprintf (dump_file, "ior:%" PRId64,
304 (int64_t) hist->hvalue.counters[0]);
306 fprintf (dump_file, ".\n");
307 break;
309 case HIST_TYPE_CONST_DELTA:
310 fprintf (dump_file, "Constant delta ");
311 if (hist->hvalue.counters)
313 fprintf (dump_file, "value:%" PRId64
314 " match:%" PRId64
315 " wrong:%" PRId64,
316 (int64_t) hist->hvalue.counters[0],
317 (int64_t) hist->hvalue.counters[1],
318 (int64_t) hist->hvalue.counters[2]);
320 fprintf (dump_file, ".\n");
321 break;
322 case HIST_TYPE_INDIR_CALL:
323 fprintf (dump_file, "Indirect call ");
324 if (hist->hvalue.counters)
326 fprintf (dump_file, "value:%" PRId64
327 " match:%" PRId64
328 " all:%" PRId64,
329 (int64_t) hist->hvalue.counters[0],
330 (int64_t) hist->hvalue.counters[1],
331 (int64_t) hist->hvalue.counters[2]);
333 fprintf (dump_file, ".\n");
334 break;
335 case HIST_TYPE_TIME_PROFILE:
336 fprintf (dump_file, "Time profile ");
337 if (hist->hvalue.counters)
339 fprintf (dump_file, "time:%" PRId64,
340 (int64_t) hist->hvalue.counters[0]);
342 fprintf (dump_file, ".\n");
343 break;
344 case HIST_TYPE_INDIR_CALL_TOPN:
345 fprintf (dump_file, "Indirect call topn ");
346 if (hist->hvalue.counters)
348 int i;
350 fprintf (dump_file, "accu:%" PRId64, hist->hvalue.counters[0]);
351 for (i = 1; i < (GCOV_ICALL_TOPN_VAL << 2); i += 2)
353 fprintf (dump_file, " target:%" PRId64 " value:%" PRId64,
354 (int64_t) hist->hvalue.counters[i],
355 (int64_t) hist->hvalue.counters[i+1]);
358 fprintf (dump_file, ".\n");
359 break;
360 case HIST_TYPE_MAX:
361 gcc_unreachable ();
365 /* Stream out histogram HIST, and all histograms chained after it, to OB. */
367 void
368 stream_out_histogram_value (struct output_block *ob, histogram_value hist)
370 struct bitpack_d bp;
371 unsigned int i;
373 bp = bitpack_create (ob->main_stream);
374 bp_pack_enum (&bp, hist_type, HIST_TYPE_MAX, hist->type);
375 bp_pack_value (&bp, hist->hvalue.next != NULL, 1);
376 streamer_write_bitpack (&bp);
377 switch (hist->type)
379 case HIST_TYPE_INTERVAL:
380 streamer_write_hwi (ob, hist->hdata.intvl.int_start);
381 streamer_write_uhwi (ob, hist->hdata.intvl.steps);
382 break;
383 default:
384 break;
386 for (i = 0; i < hist->n_counters; i++)
387 streamer_write_gcov_count (ob, hist->hvalue.counters[i]);
388 if (hist->hvalue.next)
389 stream_out_histogram_value (ob, hist->hvalue.next);
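/* Illustrative stream layout (inferred from the code above, not a normative
   format description): a single HIST_TYPE_POW2 histogram with no successor
   is written as one bitpack (type = HIST_TYPE_POW2, "has next" = 0) followed
   by its two gcov counters.  A HIST_TYPE_INTERVAL histogram additionally
   writes int_start and steps before its counters, and chained histograms
   repeat the pattern with "has next" = 1.  */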
392 /* Stream in a histogram value (and any chained successors) from IB and
    attach it to STMT. */
394 void
395 stream_in_histogram_value (struct lto_input_block *ib, gimple *stmt)
397 enum hist_type type;
398 unsigned int ncounters = 0;
399 struct bitpack_d bp;
400 unsigned int i;
401 histogram_value new_val;
402 bool next;
403 histogram_value *next_p = NULL;
407 bp = streamer_read_bitpack (ib);
408 type = bp_unpack_enum (&bp, hist_type, HIST_TYPE_MAX);
409 next = bp_unpack_value (&bp, 1);
410 new_val = gimple_alloc_histogram_value (cfun, type, stmt, NULL);
411 switch (type)
413 case HIST_TYPE_INTERVAL:
414 new_val->hdata.intvl.int_start = streamer_read_hwi (ib);
415 new_val->hdata.intvl.steps = streamer_read_uhwi (ib);
416 ncounters = new_val->hdata.intvl.steps + 2;
417 break;
419 case HIST_TYPE_POW2:
420 case HIST_TYPE_AVERAGE:
421 ncounters = 2;
422 break;
424 case HIST_TYPE_SINGLE_VALUE:
425 case HIST_TYPE_INDIR_CALL:
426 ncounters = 3;
427 break;
429 case HIST_TYPE_CONST_DELTA:
430 ncounters = 4;
431 break;
433 case HIST_TYPE_IOR:
434 case HIST_TYPE_TIME_PROFILE:
435 ncounters = 1;
436 break;
438 case HIST_TYPE_INDIR_CALL_TOPN:
439 ncounters = (GCOV_ICALL_TOPN_VAL << 2) + 1;
440 break;
442 case HIST_TYPE_MAX:
443 gcc_unreachable ();
445 new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * ncounters);
446 new_val->n_counters = ncounters;
447 for (i = 0; i < ncounters; i++)
448 new_val->hvalue.counters[i] = streamer_read_gcov_count (ib);
449 if (!next_p)
450 gimple_add_histogram_value (cfun, stmt, new_val);
451 else
452 *next_p = new_val;
453 next_p = &new_val->hvalue.next;
455 while (next);
458 /* Dump all histograms attached to STMT to DUMP_FILE. */
460 void
461 dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
463 histogram_value hist;
464 for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
465 dump_histogram_value (dump_file, hist);
468 /* Remove all histograms associated with STMT. */
470 void
471 gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
473 histogram_value val;
474 while ((val = gimple_histogram_value (fun, stmt)) != NULL)
475 gimple_remove_histogram_value (fun, stmt, val);
478 /* Duplicate all histograms associated with OSTMT to STMT. */
480 void
481 gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
482 struct function *ofun, gimple *ostmt)
484 histogram_value val;
485 for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
487 histogram_value new_val = gimple_alloc_histogram_value (fun, val->type, NULL, NULL);
488 memcpy (new_val, val, sizeof (*val));
489 new_val->hvalue.stmt = stmt;
490 new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
491 memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
492 gimple_add_histogram_value (fun, stmt, new_val);
496 /* Move all histograms associated with OSTMT to STMT. */
498 void
499 gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
501 histogram_value val = gimple_histogram_value (fun, ostmt);
502 if (val)
504      /* The following three statements can't be reordered,
505         because the histogram hashtab relies on the stmt field value
506         for finding the exact slot. */
507 set_histogram_value (fun, ostmt, NULL);
508 for (; val != NULL; val = val->hvalue.next)
509 val->hvalue.stmt = stmt;
510 set_histogram_value (fun, stmt, val);
514 static bool error_found = false;
516 /* Helper function for verify_histograms. For each histogram reachable via htab
517 walk verify that it was reached via statement walk. */
519 static int
520 visit_hist (void **slot, void *data)
522 hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
523 histogram_value hist = *(histogram_value *) slot;
525 if (!visited->contains (hist)
526 && hist->type != HIST_TYPE_TIME_PROFILE)
528 error ("dead histogram");
529 dump_histogram_value (stderr, hist);
530 debug_gimple_stmt (hist->hvalue.stmt);
531 error_found = true;
533 return 1;
536 /* Verify sanity of the histograms. */
538 DEBUG_FUNCTION void
539 verify_histograms (void)
541 basic_block bb;
542 gimple_stmt_iterator gsi;
543 histogram_value hist;
545 error_found = false;
546 hash_set<histogram_value> visited_hists;
547 FOR_EACH_BB_FN (bb, cfun)
548 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
550 gimple *stmt = gsi_stmt (gsi);
552 for (hist = gimple_histogram_value (cfun, stmt); hist;
553 hist = hist->hvalue.next)
555 if (hist->hvalue.stmt != stmt)
557 error ("Histogram value statement does not correspond to "
558 "the statement it is associated with");
559 debug_gimple_stmt (stmt);
560 dump_histogram_value (stderr, hist);
561 error_found = true;
563 visited_hists.add (hist);
566 if (VALUE_HISTOGRAMS (cfun))
567 htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
568 if (error_found)
569 internal_error ("verify_histograms failed");
572 /* Helper function for free_histograms. Free the counters of the
573    histogram in *SLOT and the histogram itself. */
575 static int
576 free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
578 histogram_value hist = *(histogram_value *) slot;
579 free (hist->hvalue.counters);
580 if (flag_checking)
581 memset (hist, 0xab, sizeof (*hist));
582 free (hist);
583 return 1;
586 void
587 free_histograms (struct function *fn)
589 if (VALUE_HISTOGRAMS (fn))
591 htab_traverse (VALUE_HISTOGRAMS (fn), free_hist, NULL);
592 htab_delete (VALUE_HISTOGRAMS (fn));
593 VALUE_HISTOGRAMS (fn) = NULL;
597 /* The overall number of invocations of the counter should match the
598    execution count of the basic block. Report it as an error rather than
599    an internal error, since it might mean that the user has misused the
600    profile somehow. */
602 static bool
603 check_counter (gimple *stmt, const char * name,
604 gcov_type *count, gcov_type *all, gcov_type bb_count)
606 if (*all != bb_count || *count > *all)
608 location_t locus;
609 locus = (stmt != NULL)
610 ? gimple_location (stmt)
611 : DECL_SOURCE_LOCATION (current_function_decl);
612 if (flag_profile_correction)
614 if (dump_enabled_p ())
615 dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
616 "correcting inconsistent value profile: %s "
617 "profiler overall count (%d) does not match BB "
618 "count (%d)\n", name, (int)*all, (int)bb_count);
619 *all = bb_count;
620 if (*count > *all)
621 *count = *all;
622 return false;
624 else
626 error_at (locus, "corrupted value profile: %s "
627 "profile counter (%d out of %d) inconsistent with "
628 "basic-block count (%d)",
629 name,
630 (int) *count,
631 (int) *all,
632 (int) bb_count);
633 return true;
637 return false;
640 /* GIMPLE based transformations. */
642 bool
643 gimple_value_profile_transformations (void)
645 basic_block bb;
646 gimple_stmt_iterator gsi;
647 bool changed = false;
648 FOR_EACH_BB_FN (bb, cfun)
650 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
652 gimple *stmt = gsi_stmt (gsi);
653 histogram_value th = gimple_histogram_value (cfun, stmt);
654 if (!th)
655 continue;
657 if (dump_file)
659 fprintf (dump_file, "Trying transformations on stmt ");
660 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
661 dump_histograms_for_stmt (cfun, dump_file, stmt);
664 /* Transformations: */
665 /* The order of things in this conditional controls which
666 transformation is used when more than one is applicable. */
667 /* It is expected that any code added by the transformations
668 will be added before the current statement, and that the
669 current statement remain valid (although possibly
670 modified) upon return. */
671 if (gimple_mod_subtract_transform (&gsi)
672 || gimple_divmod_fixed_value_transform (&gsi)
673 || gimple_mod_pow2_value_transform (&gsi)
674 || gimple_stringops_transform (&gsi)
675 || gimple_ic_transform (&gsi))
677 stmt = gsi_stmt (gsi);
678 changed = true;
679 /* Original statement may no longer be in the same block. */
680 if (bb != gimple_bb (stmt))
682 bb = gimple_bb (stmt);
683 gsi = gsi_for_stmt (stmt);
689 if (changed)
691 counts_to_freqs ();
694 return changed;
697 /* Generate code for transformation 1 (with parent gimple assignment
698 STMT and probability of taking the optimal path PROB, which is
699 equivalent to COUNT/ALL within roundoff error). This generates the
700 result into a temp and returns the temp; it does not replace or
701 alter the original STMT. */
703 static tree
704 gimple_divmod_fixed_value (gassign *stmt, tree value, int prob,
705 gcov_type count, gcov_type all)
707 gassign *stmt1, *stmt2;
708 gcond *stmt3;
709 tree tmp0, tmp1, tmp2;
710 gimple *bb1end, *bb2end, *bb3end;
711 basic_block bb, bb2, bb3, bb4;
712 tree optype, op1, op2;
713 edge e12, e13, e23, e24, e34;
714 gimple_stmt_iterator gsi;
716 gcc_assert (is_gimple_assign (stmt)
717 && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
718 || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
720 optype = TREE_TYPE (gimple_assign_lhs (stmt));
721 op1 = gimple_assign_rhs1 (stmt);
722 op2 = gimple_assign_rhs2 (stmt);
724 bb = gimple_bb (stmt);
725 gsi = gsi_for_stmt (stmt);
727 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
728 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
729 stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
730 stmt2 = gimple_build_assign (tmp1, op2);
731 stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
732 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
733 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
734 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
735 bb1end = stmt3;
737 tmp2 = create_tmp_reg (optype, "PROF");
738 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, tmp0);
739 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
740 bb2end = stmt1;
742 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, op2);
743 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
744 bb3end = stmt1;
746 /* Fix CFG. */
747 /* Edge e23 connects bb2 to bb3, etc. */
748 e12 = split_block (bb, bb1end);
749 bb2 = e12->dest;
750 bb2->count = count;
751 e23 = split_block (bb2, bb2end);
752 bb3 = e23->dest;
753 bb3->count = all - count;
754 e34 = split_block (bb3, bb3end);
755 bb4 = e34->dest;
756 bb4->count = all;
758 e12->flags &= ~EDGE_FALLTHRU;
759 e12->flags |= EDGE_FALSE_VALUE;
760 e12->probability = prob;
761 e12->count = count;
763 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
764 e13->probability = REG_BR_PROB_BASE - prob;
765 e13->count = all - count;
767 remove_edge (e23);
769 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
770 e24->probability = REG_BR_PROB_BASE;
771 e24->count = count;
773 e34->probability = REG_BR_PROB_BASE;
774 e34->count = all - count;
776 return tmp2;
779 /* Do transform 1) on the statement pointed-to by SI if applicable. */
781 static bool
782 gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
784 histogram_value histogram;
785 enum tree_code code;
786 gcov_type val, count, all;
787 tree result, value, tree_val;
788 gcov_type prob;
789 gassign *stmt;
791 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
792 if (!stmt)
793 return false;
795 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
796 return false;
798 code = gimple_assign_rhs_code (stmt);
800 if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
801 return false;
803 histogram = gimple_histogram_value_of_type (cfun, stmt,
804 HIST_TYPE_SINGLE_VALUE);
805 if (!histogram)
806 return false;
808 value = histogram->hvalue.value;
809 val = histogram->hvalue.counters[0];
810 count = histogram->hvalue.counters[1];
811 all = histogram->hvalue.counters[2];
812 gimple_remove_histogram_value (cfun, stmt, histogram);
814   /* We require that count is at least half of all; this means
815      that for the transformation to fire, the value must be constant
816      at least 50% of the time (and 75% gives the guarantee of usage). */
817 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
818 || 2 * count < all
819 || optimize_bb_for_size_p (gimple_bb (stmt)))
820 return false;
822 if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
823 return false;
825 /* Compute probability of taking the optimal path. */
826 if (all > 0)
827 prob = GCOV_COMPUTE_SCALE (count, all);
828 else
829 prob = 0;
831 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
832 tree_val = build_int_cst (get_gcov_type (), val);
833 else
835 HOST_WIDE_INT a[2];
836 a[0] = (unsigned HOST_WIDE_INT) val;
837 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
839 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
840 TYPE_PRECISION (get_gcov_type ()), false));
842 result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
844 if (dump_file)
846 fprintf (dump_file, "Div/mod by constant ");
847 print_generic_expr (dump_file, value, TDF_SLIM);
848 fprintf (dump_file, "=");
849 print_generic_expr (dump_file, tree_val, TDF_SLIM);
850 fprintf (dump_file, " transformation on insn ");
851 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
854 gimple_assign_set_rhs_from_tree (si, result);
855 update_stmt (gsi_stmt (*si));
857 return true;
860 /* Generate code for transformation 2 (with parent gimple assign STMT and
861 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
862 within roundoff error). This generates the result into a temp and returns
863 the temp; it does not replace or alter the original STMT. */
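/* Illustrative shape of the code built below (hypothetical source form):

       t = b - 1;
       if ((t & b) == 0)        <-- b is a power of two
         r = a & t;             <-- cheap bitwise AND
       else
         r = a % b;             <-- original modulo as the fallback   */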
865 static tree
866 gimple_mod_pow2 (gassign *stmt, int prob, gcov_type count, gcov_type all)
868 gassign *stmt1, *stmt2, *stmt3;
869 gcond *stmt4;
870 tree tmp2, tmp3;
871 gimple *bb1end, *bb2end, *bb3end;
872 basic_block bb, bb2, bb3, bb4;
873 tree optype, op1, op2;
874 edge e12, e13, e23, e24, e34;
875 gimple_stmt_iterator gsi;
876 tree result;
878 gcc_assert (is_gimple_assign (stmt)
879 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
881 optype = TREE_TYPE (gimple_assign_lhs (stmt));
882 op1 = gimple_assign_rhs1 (stmt);
883 op2 = gimple_assign_rhs2 (stmt);
885 bb = gimple_bb (stmt);
886 gsi = gsi_for_stmt (stmt);
888 result = create_tmp_reg (optype, "PROF");
889 tmp2 = make_temp_ssa_name (optype, NULL, "PROF");
890 tmp3 = make_temp_ssa_name (optype, NULL, "PROF");
891 stmt2 = gimple_build_assign (tmp2, PLUS_EXPR, op2,
892 build_int_cst (optype, -1));
893 stmt3 = gimple_build_assign (tmp3, BIT_AND_EXPR, tmp2, op2);
894 stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
895 NULL_TREE, NULL_TREE);
896 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
897 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
898 gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
899 bb1end = stmt4;
901 /* tmp2 == op2-1 inherited from previous block. */
902 stmt1 = gimple_build_assign (result, BIT_AND_EXPR, op1, tmp2);
903 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
904 bb2end = stmt1;
906 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
907 op1, op2);
908 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
909 bb3end = stmt1;
911 /* Fix CFG. */
912 /* Edge e23 connects bb2 to bb3, etc. */
913 e12 = split_block (bb, bb1end);
914 bb2 = e12->dest;
915 bb2->count = count;
916 e23 = split_block (bb2, bb2end);
917 bb3 = e23->dest;
918 bb3->count = all - count;
919 e34 = split_block (bb3, bb3end);
920 bb4 = e34->dest;
921 bb4->count = all;
923 e12->flags &= ~EDGE_FALLTHRU;
924 e12->flags |= EDGE_FALSE_VALUE;
925 e12->probability = prob;
926 e12->count = count;
928 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
929 e13->probability = REG_BR_PROB_BASE - prob;
930 e13->count = all - count;
932 remove_edge (e23);
934 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
935 e24->probability = REG_BR_PROB_BASE;
936 e24->count = count;
938 e34->probability = REG_BR_PROB_BASE;
939 e34->count = all - count;
941 return result;
944 /* Do transform 2) on the statement pointed-to by SI if applicable. */
946 static bool
947 gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
949 histogram_value histogram;
950 enum tree_code code;
951 gcov_type count, wrong_values, all;
952 tree lhs_type, result, value;
953 gcov_type prob;
954 gassign *stmt;
956 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
957 if (!stmt)
958 return false;
960 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
961 if (!INTEGRAL_TYPE_P (lhs_type))
962 return false;
964 code = gimple_assign_rhs_code (stmt);
966 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
967 return false;
969 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
970 if (!histogram)
971 return false;
973 value = histogram->hvalue.value;
974 wrong_values = histogram->hvalue.counters[0];
975 count = histogram->hvalue.counters[1];
977 gimple_remove_histogram_value (cfun, stmt, histogram);
979   /* We require that we hit a power of 2 in at least half of all evaluations. */
980 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
981 || count < wrong_values
982 || optimize_bb_for_size_p (gimple_bb (stmt)))
983 return false;
985 if (dump_file)
987 fprintf (dump_file, "Mod power of 2 transformation on insn ");
988 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
991 /* Compute probability of taking the optimal path. */
992 all = count + wrong_values;
994 if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
995 return false;
997 if (all > 0)
998 prob = GCOV_COMPUTE_SCALE (count, all);
999 else
1000 prob = 0;
1002 result = gimple_mod_pow2 (stmt, prob, count, all);
1004 gimple_assign_set_rhs_from_tree (si, result);
1005 update_stmt (gsi_stmt (*si));
1007 return true;
1010 /* Generate code for transformations 3 and 4 (with parent gimple assign STMT
1011    and NCOUNTS the number of cases to support; currently only NCOUNTS == 0 or
1012    1 is supported, and this is built into this interface). The probabilities
1013    of taking the optimal paths are PROB1 and PROB2, which are equivalent to
1014    COUNT1/ALL and COUNT2/ALL respectively within roundoff error). This generates the
1015 result into a temp and returns the temp; it does not replace or alter
1016 the original STMT. */
1017 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
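/* Illustrative shape for the common steps == 2 case (hypothetical source
   form):

       r = a;
       if (r >= b)              <-- only when a / b is at least 1
         {
           r = r - b;
           if (r >= b)          <-- only when a / b is at least 2
             r = r % b;         <-- fall back to the real modulo
         }                                                             */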
1019 static tree
1020 gimple_mod_subtract (gassign *stmt, int prob1, int prob2, int ncounts,
1021 gcov_type count1, gcov_type count2, gcov_type all)
1023 gassign *stmt1;
1024 gimple *stmt2;
1025 gcond *stmt3;
1026 tree tmp1;
1027 gimple *bb1end, *bb2end = NULL, *bb3end;
1028 basic_block bb, bb2, bb3, bb4;
1029 tree optype, op1, op2;
1030 edge e12, e23 = 0, e24, e34, e14;
1031 gimple_stmt_iterator gsi;
1032 tree result;
1034 gcc_assert (is_gimple_assign (stmt)
1035 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
1037 optype = TREE_TYPE (gimple_assign_lhs (stmt));
1038 op1 = gimple_assign_rhs1 (stmt);
1039 op2 = gimple_assign_rhs2 (stmt);
1041 bb = gimple_bb (stmt);
1042 gsi = gsi_for_stmt (stmt);
1044 result = create_tmp_reg (optype, "PROF");
1045 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1046 stmt1 = gimple_build_assign (result, op1);
1047 stmt2 = gimple_build_assign (tmp1, op2);
1048 stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1049 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1050 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1051 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
1052 bb1end = stmt3;
1054 if (ncounts) /* Assumed to be 0 or 1 */
1056 stmt1 = gimple_build_assign (result, MINUS_EXPR, result, tmp1);
1057 stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1058 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1059 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1060 bb2end = stmt2;
1063 /* Fallback case. */
1064 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
1065 result, tmp1);
1066 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1067 bb3end = stmt1;
1069 /* Fix CFG. */
1070 /* Edge e23 connects bb2 to bb3, etc. */
1071 /* However block 3 is optional; if it is not there, references
1072 to 3 really refer to block 2. */
1073 e12 = split_block (bb, bb1end);
1074 bb2 = e12->dest;
1075 bb2->count = all - count1;
1077 if (ncounts) /* Assumed to be 0 or 1. */
1079 e23 = split_block (bb2, bb2end);
1080 bb3 = e23->dest;
1081 bb3->count = all - count1 - count2;
1084 e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1085 bb4 = e34->dest;
1086 bb4->count = all;
1088 e12->flags &= ~EDGE_FALLTHRU;
1089 e12->flags |= EDGE_FALSE_VALUE;
1090 e12->probability = REG_BR_PROB_BASE - prob1;
1091 e12->count = all - count1;
1093 e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1094 e14->probability = prob1;
1095 e14->count = count1;
1097 if (ncounts) /* Assumed to be 0 or 1. */
1099 e23->flags &= ~EDGE_FALLTHRU;
1100 e23->flags |= EDGE_FALSE_VALUE;
1101 e23->count = all - count1 - count2;
1102 e23->probability = REG_BR_PROB_BASE - prob2;
1104 e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1105 e24->probability = prob2;
1106 e24->count = count2;
1109 e34->probability = REG_BR_PROB_BASE;
1110 e34->count = all - count1 - count2;
1112 return result;
1115 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
1117 static bool
1118 gimple_mod_subtract_transform (gimple_stmt_iterator *si)
1120 histogram_value histogram;
1121 enum tree_code code;
1122 gcov_type count, wrong_values, all;
1123 tree lhs_type, result;
1124 gcov_type prob1, prob2;
1125 unsigned int i, steps;
1126 gcov_type count1, count2;
1127 gassign *stmt;
1128 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
1129 if (!stmt)
1130 return false;
1132 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1133 if (!INTEGRAL_TYPE_P (lhs_type))
1134 return false;
1136 code = gimple_assign_rhs_code (stmt);
1138 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
1139 return false;
1141 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
1142 if (!histogram)
1143 return false;
1145 all = 0;
1146 wrong_values = 0;
1147 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1148 all += histogram->hvalue.counters[i];
1150 wrong_values += histogram->hvalue.counters[i];
1151 wrong_values += histogram->hvalue.counters[i+1];
1152 steps = histogram->hdata.intvl.steps;
1153 all += wrong_values;
1154 count1 = histogram->hvalue.counters[0];
1155 count2 = histogram->hvalue.counters[1];
1157 /* Compute probability of taking the optimal path. */
1158 if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
1160 gimple_remove_histogram_value (cfun, stmt, histogram);
1161 return false;
1164 if (flag_profile_correction && count1 + count2 > all)
1165 all = count1 + count2;
1167 gcc_assert (count1 + count2 <= all);
1169 /* We require that we use just subtractions in at least 50% of all
1170 evaluations. */
1171 count = 0;
1172 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1174 count += histogram->hvalue.counters[i];
1175 if (count * 2 >= all)
1176 break;
1178 if (i == steps
1179 || optimize_bb_for_size_p (gimple_bb (stmt)))
1180 return false;
1182 gimple_remove_histogram_value (cfun, stmt, histogram);
1183 if (dump_file)
1185 fprintf (dump_file, "Mod subtract transformation on insn ");
1186 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1189 /* Compute probability of taking the optimal path(s). */
1190 if (all > 0)
1192 prob1 = GCOV_COMPUTE_SCALE (count1, all);
1193 prob2 = GCOV_COMPUTE_SCALE (count2, all);
1195 else
1197 prob1 = prob2 = 0;
1200 /* In practice, "steps" is always 2. This interface reflects this,
1201 and will need to be changed if "steps" can change. */
1202 result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
1204 gimple_assign_set_rhs_from_tree (si, result);
1205 update_stmt (gsi_stmt (*si));
1207 return true;
1210 typedef int_hash <unsigned int, 0, UINT_MAX> profile_id_hash;
1212 static hash_map<profile_id_hash, cgraph_node *> *cgraph_node_map = 0;
1214 /* Returns true if the node map is initialized. This
1215    is used to test whether profile_id has been created
1216    for cgraph_nodes. */
1218 bool
1219 coverage_node_map_initialized_p (void)
1221 return cgraph_node_map != 0;
1224 /* Initialize map from PROFILE_ID to CGRAPH_NODE.
1225    When LOCAL is true, the PROFILE_IDs are computed. When it is false, we assume
1226    that the PROFILE_IDs were already assigned. */
1228 void
1229 init_node_map (bool local)
1231 struct cgraph_node *n;
1232 cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
1234 FOR_EACH_DEFINED_FUNCTION (n)
1235 if (n->has_gimple_body_p ())
1237 cgraph_node **val;
1238 if (local)
1240 n->profile_id = coverage_compute_profile_id (n);
1241 while ((val = cgraph_node_map->get (n->profile_id))
1242 || !n->profile_id)
1244 if (dump_file)
1245 fprintf (dump_file, "Local profile-id %i conflict"
1246 " with nodes %s/%i %s/%i\n",
1247 n->profile_id,
1248 n->name (),
1249 n->order,
1250 (*val)->name (),
1251 (*val)->order);
1252 n->profile_id = (n->profile_id + 1) & 0x7fffffff;
1255 else if (!n->profile_id)
1257 if (dump_file)
1258 fprintf (dump_file,
1259 "Node %s/%i has no profile-id"
1260 " (profile feedback missing?)\n",
1261 n->name (),
1262 n->order);
1263 continue;
1265 else if ((val = cgraph_node_map->get (n->profile_id)))
1267 if (dump_file)
1268 fprintf (dump_file,
1269 "Node %s/%i has IP profile-id %i conflict. "
1270 "Giving up.\n",
1271 n->name (),
1272 n->order,
1273 n->profile_id);
1274 *val = NULL;
1275 continue;
1277 cgraph_node_map->put (n->profile_id, n);
1281 /* Delete the CGRAPH_NODE_MAP. */
1283 void
1284 del_node_map (void)
1286 delete cgraph_node_map;
1289 /* Return the cgraph node for the function with profile id PROFILE_ID. */
1291 struct cgraph_node*
1292 find_func_by_profile_id (int profile_id)
1294 cgraph_node **val = cgraph_node_map->get (profile_id);
1295 if (val)
1296 return *val;
1297 else
1298 return NULL;
1301 /* Perform a sanity check on the indirect call target. Due to race conditions,
1302    a false function target may be attributed to an indirect call site. If the
1303    call expression's type mismatches the target function's type, expand_call
1304    may ICE. Here we only do a very minimal sanity check just to make the
1305    compiler happy. Returns true if TARGET is considered ok for call CALL_STMT. */
1307 bool
1308 check_ic_target (gcall *call_stmt, struct cgraph_node *target)
1310 location_t locus;
1311 if (gimple_check_call_matching_types (call_stmt, target->decl, true))
1312 return true;
1314 locus = gimple_location (call_stmt);
1315 if (dump_enabled_p ())
1316 dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
1317 "Skipping target %s with mismatching types for icall\n",
1318 target->name ());
1319 return false;
1322 /* Do transformation
1324 if (actual_callee_address == address_of_most_common_function/method)
1325 do direct call
1326 else
1327 old call
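/* Illustrative shape of the result (hypothetical source form; the names are
   made up):

       if (fn_ptr == &most_common_callee)
         r = most_common_callee (args);   <-- direct call, visible to the inliner
       else
         r = fn_ptr (args);               <-- original indirect call

   The two results are merged back with a PHI node in the join block.  */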
1330 gcall *
1331 gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
1332 int prob, gcov_type count, gcov_type all)
1334 gcall *dcall_stmt;
1335 gassign *load_stmt;
1336 gcond *cond_stmt;
1337 gcall *iretbnd_stmt = NULL;
1338 tree tmp0, tmp1, tmp;
1339 basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
1340 tree optype = build_pointer_type (void_type_node);
1341 edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
1342 gimple_stmt_iterator gsi;
1343 int lp_nr, dflags;
1344 edge e_eh, e;
1345 edge_iterator ei;
1346 gimple_stmt_iterator psi;
1348 cond_bb = gimple_bb (icall_stmt);
1349 gsi = gsi_for_stmt (icall_stmt);
1351 if (gimple_call_with_bounds_p (icall_stmt) && gimple_call_lhs (icall_stmt))
1352 iretbnd_stmt = chkp_retbnd_call_by_val (gimple_call_lhs (icall_stmt));
1354 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1355 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1356 tmp = unshare_expr (gimple_call_fn (icall_stmt));
1357 load_stmt = gimple_build_assign (tmp0, tmp);
1358 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1360 tmp = fold_convert (optype, build_addr (direct_call->decl));
1361 load_stmt = gimple_build_assign (tmp1, tmp);
1362 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1364 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1365 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1367 if (TREE_CODE (gimple_vdef (icall_stmt)) == SSA_NAME)
1369 unlink_stmt_vdef (icall_stmt);
1370 release_ssa_name (gimple_vdef (icall_stmt));
1372 gimple_set_vdef (icall_stmt, NULL_TREE);
1373 gimple_set_vuse (icall_stmt, NULL_TREE);
1374 update_stmt (icall_stmt);
1375 dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
1376 gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
1377 dflags = flags_from_decl_or_type (direct_call->decl);
1378 if ((dflags & ECF_NORETURN) != 0)
1379 gimple_call_set_lhs (dcall_stmt, NULL_TREE);
1380 gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
1382 /* Fix CFG. */
1383 /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1384 e_cd = split_block (cond_bb, cond_stmt);
1385 dcall_bb = e_cd->dest;
1386 dcall_bb->count = count;
1388 e_di = split_block (dcall_bb, dcall_stmt);
1389 icall_bb = e_di->dest;
1390 icall_bb->count = all - count;
1392 /* Do not disturb existing EH edges from the indirect call. */
1393 if (!stmt_ends_bb_p (icall_stmt))
1394 e_ij = split_block (icall_bb, icall_stmt);
1395 else
1397 e_ij = find_fallthru_edge (icall_bb->succs);
1398 /* The indirect call might be noreturn. */
1399 if (e_ij != NULL)
1401 e_ij->probability = REG_BR_PROB_BASE;
1402 e_ij->count = all - count;
1403 e_ij = single_pred_edge (split_edge (e_ij));
1406 if (e_ij != NULL)
1408 join_bb = e_ij->dest;
1409 join_bb->count = all;
1412 e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1413 e_cd->probability = prob;
1414 e_cd->count = count;
1416 e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
1417 e_ci->probability = REG_BR_PROB_BASE - prob;
1418 e_ci->count = all - count;
1420 remove_edge (e_di);
1422 if (e_ij != NULL)
1424 if ((dflags & ECF_NORETURN) != 0)
1425 e_ij->count = all;
1426 else
1428 e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
1429 e_dj->probability = REG_BR_PROB_BASE;
1430 e_dj->count = count;
1432 e_ij->count = all - count;
1434 e_ij->probability = REG_BR_PROB_BASE;
1437 /* Insert PHI node for the call result if necessary. */
1438 if (gimple_call_lhs (icall_stmt)
1439 && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
1440 && (dflags & ECF_NORETURN) == 0)
1442 tree result = gimple_call_lhs (icall_stmt);
1443 gphi *phi = create_phi_node (result, join_bb);
1444 gimple_call_set_lhs (icall_stmt,
1445 duplicate_ssa_name (result, icall_stmt));
1446 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1447 gimple_call_set_lhs (dcall_stmt,
1448 duplicate_ssa_name (result, dcall_stmt));
1449 add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
1451      /* If the indirect call is followed by a BUILT_IN_CHKP_BNDRET
1452         call, then we need to make a copy of it for the direct
1453         call. */
1454 if (iretbnd_stmt)
1456 if (gimple_call_lhs (iretbnd_stmt))
1458 gimple *copy;
1460 if (TREE_CODE (gimple_vdef (iretbnd_stmt)) == SSA_NAME)
1462 unlink_stmt_vdef (iretbnd_stmt);
1463 release_ssa_name (gimple_vdef (iretbnd_stmt));
1465 gimple_set_vdef (iretbnd_stmt, NULL_TREE);
1466 gimple_set_vuse (iretbnd_stmt, NULL_TREE);
1467 update_stmt (iretbnd_stmt);
1469 result = gimple_call_lhs (iretbnd_stmt);
1470 phi = create_phi_node (result, join_bb);
1472 copy = gimple_copy (iretbnd_stmt);
1473 gimple_call_set_arg (copy, 0,
1474 gimple_call_lhs (dcall_stmt));
1475 gimple_call_set_lhs (copy, duplicate_ssa_name (result, copy));
1476 gsi_insert_on_edge (e_dj, copy);
1477 add_phi_arg (phi, gimple_call_lhs (copy),
1478 e_dj, UNKNOWN_LOCATION);
1480 gimple_call_set_arg (iretbnd_stmt, 0,
1481 gimple_call_lhs (icall_stmt));
1482 gimple_call_set_lhs (iretbnd_stmt,
1483 duplicate_ssa_name (result, iretbnd_stmt));
1484 psi = gsi_for_stmt (iretbnd_stmt);
1485 gsi_remove (&psi, false);
1486 gsi_insert_on_edge (e_ij, iretbnd_stmt);
1487 add_phi_arg (phi, gimple_call_lhs (iretbnd_stmt),
1488 e_ij, UNKNOWN_LOCATION);
1490 gsi_commit_one_edge_insert (e_dj, NULL);
1491 gsi_commit_one_edge_insert (e_ij, NULL);
1493 else
1495 psi = gsi_for_stmt (iretbnd_stmt);
1496 gsi_remove (&psi, true);
1501 /* Build an EH edge for the direct call if necessary. */
1502 lp_nr = lookup_stmt_eh_lp (icall_stmt);
1503 if (lp_nr > 0 && stmt_could_throw_p (dcall_stmt))
1505 add_stmt_to_eh_lp (dcall_stmt, lp_nr);
1508 FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
1509 if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
1511 e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
1512 for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
1513 !gsi_end_p (psi); gsi_next (&psi))
1515 gphi *phi = psi.phi ();
1516 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1517 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
1520 if (!stmt_could_throw_p (dcall_stmt))
1521 gimple_purge_dead_eh_edges (dcall_bb);
1522 return dcall_stmt;
1526   For every checked indirect/virtual call, determine whether the most common
1527   profile id of the callee is dominant (the check below requires its count to
1528   exceed 3/4 of all calls); if so, the conversion to a direct call is noted
   here and finished by ipa-profile.
1531 static bool
1532 gimple_ic_transform (gimple_stmt_iterator *gsi)
1534 gcall *stmt;
1535 histogram_value histogram;
1536 gcov_type val, count, all, bb_all;
1537 struct cgraph_node *direct_call;
1539 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1540 if (!stmt)
1541 return false;
1543 if (gimple_call_fndecl (stmt) != NULL_TREE)
1544 return false;
1546 if (gimple_call_internal_p (stmt))
1547 return false;
1549 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
1550 if (!histogram)
1551 return false;
1553 val = histogram->hvalue.counters [0];
1554 count = histogram->hvalue.counters [1];
1555 all = histogram->hvalue.counters [2];
1557 bb_all = gimple_bb (stmt)->count;
1558 /* The order of CHECK_COUNTER calls is important -
1559 since check_counter can correct the third parameter
1560 and we want to make count <= all <= bb_all. */
1561 if ( check_counter (stmt, "ic", &all, &bb_all, bb_all)
1562 || check_counter (stmt, "ic", &count, &all, all))
1564 gimple_remove_histogram_value (cfun, stmt, histogram);
1565 return false;
1568 if (4 * count <= 3 * all)
1569 return false;
1571 direct_call = find_func_by_profile_id ((int)val);
1573 if (direct_call == NULL)
1575 if (val)
1577 if (dump_file)
1579 fprintf (dump_file, "Indirect call -> direct call from other module");
1580 print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
1581 fprintf (dump_file, "=> %i (will resolve only with LTO)\n", (int)val);
1584 return false;
1587 if (!check_ic_target (stmt, direct_call))
1589 if (dump_file)
1591 fprintf (dump_file, "Indirect call -> direct call ");
1592 print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
1593 fprintf (dump_file, "=> ");
1594 print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
1595 fprintf (dump_file, " transformation skipped because of type mismatch");
1596 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1598 gimple_remove_histogram_value (cfun, stmt, histogram);
1599 return false;
1602 if (dump_file)
1604 fprintf (dump_file, "Indirect call -> direct call ");
1605 print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
1606 fprintf (dump_file, "=> ");
1607 print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
1608       fprintf (dump_file, " transformation on insn postponed to ipa-profile");
1609 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1610 fprintf (dump_file, "hist->count %" PRId64
1611 " hist->all %" PRId64"\n", count, all);
1614 return true;
1617 /* Return true if the stringop CALL shall be profiled. SIZE_ARG is set
1618    to the argument index for the size of the string operation. */
1620 static bool
1621 interesting_stringop_to_profile_p (gcall *call, int *size_arg)
1623 enum built_in_function fcode;
1625 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
1626 if (fcode != BUILT_IN_MEMCPY && fcode != BUILT_IN_MEMPCPY
1627 && fcode != BUILT_IN_MEMSET && fcode != BUILT_IN_BZERO)
1628 return false;
1630 switch (fcode)
1632 case BUILT_IN_MEMCPY:
1633 case BUILT_IN_MEMPCPY:
1634 *size_arg = 2;
1635 return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
1636 INTEGER_TYPE, VOID_TYPE);
1637 case BUILT_IN_MEMSET:
1638 *size_arg = 2;
1639 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1640 INTEGER_TYPE, VOID_TYPE);
1641 case BUILT_IN_BZERO:
1642 *size_arg = 1;
1643 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1644 VOID_TYPE);
1645 default:
1646 gcc_unreachable ();
1650 /* Convert stringop (..., vcall_size)
1651 into
1652 if (vcall_size == icall_size)
1653 stringop (..., icall_size);
1654 else
1655 stringop (..., vcall_size);
1656 assuming we'll propagate a true constant into ICALL_SIZE later. */
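/* For example (hypothetically), a profiled memcpy whose size argument is
   almost always 32 would become

       if (n == 32)
         memcpy (dst, src, 32);   <-- constant size, expandable inline
       else
         memcpy (dst, src, n);    <-- original call

   with the constant propagated into the first call later.  */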
1658 static void
1659 gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, int prob,
1660 gcov_type count, gcov_type all)
1662 gassign *tmp_stmt;
1663 gcond *cond_stmt;
1664 gcall *icall_stmt;
1665 tree tmp0, tmp1, vcall_size, optype;
1666 basic_block cond_bb, icall_bb, vcall_bb, join_bb;
1667 edge e_ci, e_cv, e_iv, e_ij, e_vj;
1668 gimple_stmt_iterator gsi;
1669 int size_arg;
1671 if (!interesting_stringop_to_profile_p (vcall_stmt, &size_arg))
1672 gcc_unreachable ();
1674 cond_bb = gimple_bb (vcall_stmt);
1675 gsi = gsi_for_stmt (vcall_stmt);
1677 vcall_size = gimple_call_arg (vcall_stmt, size_arg);
1678 optype = TREE_TYPE (vcall_size);
1680 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1681 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1682 tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
1683 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1685 tmp_stmt = gimple_build_assign (tmp1, vcall_size);
1686 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1688 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1689 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1691 if (TREE_CODE (gimple_vdef (vcall_stmt)) == SSA_NAME)
1693 unlink_stmt_vdef (vcall_stmt);
1694 release_ssa_name (gimple_vdef (vcall_stmt));
1696 gimple_set_vdef (vcall_stmt, NULL);
1697 gimple_set_vuse (vcall_stmt, NULL);
1698 update_stmt (vcall_stmt);
1699 icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
1700 gimple_call_set_arg (icall_stmt, size_arg,
1701 fold_convert (optype, icall_size));
1702 gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
1704 /* Fix CFG. */
1705 /* Edge e_ci connects cond_bb to icall_bb, etc. */
1706 e_ci = split_block (cond_bb, cond_stmt);
1707 icall_bb = e_ci->dest;
1708 icall_bb->count = count;
1710 e_iv = split_block (icall_bb, icall_stmt);
1711 vcall_bb = e_iv->dest;
1712 vcall_bb->count = all - count;
1714 e_vj = split_block (vcall_bb, vcall_stmt);
1715 join_bb = e_vj->dest;
1716 join_bb->count = all;
1718 e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1719 e_ci->probability = prob;
1720 e_ci->count = count;
1722 e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
1723 e_cv->probability = REG_BR_PROB_BASE - prob;
1724 e_cv->count = all - count;
1726 remove_edge (e_iv);
1728 e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
1729 e_ij->probability = REG_BR_PROB_BASE;
1730 e_ij->count = count;
1732 e_vj->probability = REG_BR_PROB_BASE;
1733 e_vj->count = all - count;
1735 /* Insert PHI node for the call result if necessary. */
1736 if (gimple_call_lhs (vcall_stmt)
1737 && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
1739 tree result = gimple_call_lhs (vcall_stmt);
1740 gphi *phi = create_phi_node (result, join_bb);
1741 gimple_call_set_lhs (vcall_stmt,
1742 duplicate_ssa_name (result, vcall_stmt));
1743 add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
1744 gimple_call_set_lhs (icall_stmt,
1745 duplicate_ssa_name (result, icall_stmt));
1746 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1749 /* Because these are all string op builtins, they're all nothrow. */
1750 gcc_assert (!stmt_could_throw_p (vcall_stmt));
1751 gcc_assert (!stmt_could_throw_p (icall_stmt));
1754 /* Do the string operation transformation (specializing the operation for a
1755    profiled constant size) on the statement pointed-to by GSI if applicable. */
1757 static bool
1758 gimple_stringops_transform (gimple_stmt_iterator *gsi)
1760 gcall *stmt;
1761 tree blck_size;
1762 enum built_in_function fcode;
1763 histogram_value histogram;
1764 gcov_type count, all, val;
1765 tree dest, src;
1766 unsigned int dest_align, src_align;
1767 gcov_type prob;
1768 tree tree_val;
1769 int size_arg;
1771 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1772 if (!stmt)
1773 return false;
1775 if (!gimple_call_builtin_p (gsi_stmt (*gsi), BUILT_IN_NORMAL))
1776 return false;
1778 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1779 return false;
1781 blck_size = gimple_call_arg (stmt, size_arg);
1782 if (TREE_CODE (blck_size) == INTEGER_CST)
1783 return false;
1785 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
1786 if (!histogram)
1787 return false;
1789 val = histogram->hvalue.counters[0];
1790 count = histogram->hvalue.counters[1];
1791 all = histogram->hvalue.counters[2];
1792 gimple_remove_histogram_value (cfun, stmt, histogram);
1794   /* We require that count dominates all; the check below demands that
1795      for the transformation to fire, the value must be constant for at
1796      least five sixths (roughly 83%) of all executions. */
1797 if ((6 * count / 5) < all || optimize_bb_for_size_p (gimple_bb (stmt)))
1798 return false;
1799 if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
1800 return false;
1801 if (all > 0)
1802 prob = GCOV_COMPUTE_SCALE (count, all);
1803 else
1804 prob = 0;
1806 dest = gimple_call_arg (stmt, 0);
1807 dest_align = get_pointer_alignment (dest);
1808 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
1809 switch (fcode)
1811 case BUILT_IN_MEMCPY:
1812 case BUILT_IN_MEMPCPY:
1813 src = gimple_call_arg (stmt, 1);
1814 src_align = get_pointer_alignment (src);
1815 if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
1816 return false;
1817 break;
1818 case BUILT_IN_MEMSET:
1819 if (!can_store_by_pieces (val, builtin_memset_read_str,
1820 gimple_call_arg (stmt, 1),
1821 dest_align, true))
1822 return false;
1823 break;
1824 case BUILT_IN_BZERO:
1825 if (!can_store_by_pieces (val, builtin_memset_read_str,
1826 integer_zero_node,
1827 dest_align, true))
1828 return false;
1829 break;
1830 default:
1831 gcc_unreachable ();
1834 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
1835 tree_val = build_int_cst (get_gcov_type (), val);
1836 else
1838 HOST_WIDE_INT a[2];
1839 a[0] = (unsigned HOST_WIDE_INT) val;
1840 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
1842 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
1843 TYPE_PRECISION (get_gcov_type ()), false));
1846 if (dump_file)
1848 fprintf (dump_file, "Single value %i stringop transformation on ",
1849 (int)val);
1850 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1853 gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);
1855 return true;
1858 void
1859 stringop_block_profile (gimple *stmt, unsigned int *expected_align,
1860 HOST_WIDE_INT *expected_size)
1862 histogram_value histogram;
1863 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
1865 if (!histogram)
1866 *expected_size = -1;
1867 else if (!histogram->hvalue.counters[1])
1869 *expected_size = -1;
1870 gimple_remove_histogram_value (cfun, stmt, histogram);
1872 else
1874 gcov_type size;
1875 size = ((histogram->hvalue.counters[0]
1876 + histogram->hvalue.counters[1] / 2)
1877 / histogram->hvalue.counters[1]);
1878       /* Even if we can hold a bigger value in SIZE, INT_MAX
1879          is a safe "infinity" for code generation strategies. */
1880 if (size > INT_MAX)
1881 size = INT_MAX;
1882 *expected_size = size;
1883 gimple_remove_histogram_value (cfun, stmt, histogram);
1886 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
1888 if (!histogram)
1889 *expected_align = 0;
1890 else if (!histogram->hvalue.counters[0])
1892 gimple_remove_histogram_value (cfun, stmt, histogram);
1893 *expected_align = 0;
1895 else
1897 gcov_type count;
1898 int alignment;
1900 count = histogram->hvalue.counters[0];
1901 alignment = 1;
1902 while (!(count & alignment)
1903 && (alignment * 2 * BITS_PER_UNIT))
1904 alignment <<= 1;
1905 *expected_align = alignment * BITS_PER_UNIT;
1906 gimple_remove_histogram_value (cfun, stmt, histogram);
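/* Worked example (illustrative): if the bitwise OR of all measured
   destination addresses ends in binary ...1000, its lowest set bit is 8, so
   every address was a multiple of 8 and *expected_align becomes
   8 * BITS_PER_UNIT.  */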
1911 /* Find values inside STMT for which we want to measure histograms for
1912    division/modulo optimization. */
1914 static void
1915 gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
1917 tree lhs, divisor, op0, type;
1918 histogram_value hist;
1920 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1921 return;
1923 lhs = gimple_assign_lhs (stmt);
1924 type = TREE_TYPE (lhs);
1925 if (!INTEGRAL_TYPE_P (type))
1926 return;
1928 switch (gimple_assign_rhs_code (stmt))
1930 case TRUNC_DIV_EXPR:
1931 case TRUNC_MOD_EXPR:
1932 divisor = gimple_assign_rhs2 (stmt);
1933 op0 = gimple_assign_rhs1 (stmt);
1935 values->reserve (3);
1937 if (TREE_CODE (divisor) == SSA_NAME)
1938 /* Check for the case where the divisor is the same value most
1939 of the time. */
1940 values->quick_push (gimple_alloc_histogram_value (cfun,
1941 HIST_TYPE_SINGLE_VALUE,
1942 stmt, divisor));
1944       /* For mod, check whether it is often a no-op (or replaceable by
1945          a few subtractions). */
1946 if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
1947 && TYPE_UNSIGNED (type))
1949 tree val;
1950 /* Check for a special case where the divisor is power of 2. */
1951 values->quick_push (gimple_alloc_histogram_value (cfun,
1952 HIST_TYPE_POW2,
1953 stmt, divisor));
1955 val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
1956 hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
1957 stmt, val);
1958 hist->hdata.intvl.int_start = 0;
1959 hist->hdata.intvl.steps = 2;
1960 values->quick_push (hist);
1962 return;
1964 default:
1965 return;
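/* For example, for an unsigned statement  r = a % b  with an SSA_NAME
   divisor, the code above requests up to three histograms: the most common
   value of b (HIST_TYPE_SINGLE_VALUE), whether b tends to be a power of two
   (HIST_TYPE_POW2), and whether a / b usually falls into [0, 1]
   (HIST_TYPE_INTERVAL with int_start 0 and 2 steps).  */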
1969 /* Find calls inside STMT for which we want to measure histograms for
1970    indirect/virtual call optimization. */
1972 static void
1973 gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
1975 tree callee;
1977 if (gimple_code (stmt) != GIMPLE_CALL
1978 || gimple_call_internal_p (stmt)
1979 || gimple_call_fndecl (stmt) != NULL_TREE)
1980 return;
1982 callee = gimple_call_fn (stmt);
1984 values->reserve (3);
1986 values->quick_push (gimple_alloc_histogram_value (
1987 cfun,
1988 PARAM_VALUE (PARAM_INDIR_CALL_TOPN_PROFILE) ?
1989 HIST_TYPE_INDIR_CALL_TOPN :
1990 HIST_TYPE_INDIR_CALL,
1991 stmt, callee));
1993 return;
1996 /* Find values inside STMT for which we want to measure histograms for
1997    string operations. */
1999 static void
2000 gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
2002 gcall *stmt;
2003 tree blck_size;
2004 tree dest;
2005 int size_arg;
2007 stmt = dyn_cast <gcall *> (gs);
2008 if (!stmt)
2009 return;
2011 if (!gimple_call_builtin_p (gs, BUILT_IN_NORMAL))
2012 return;
2014 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
2015 return;
2017 dest = gimple_call_arg (stmt, 0);
2018 blck_size = gimple_call_arg (stmt, size_arg);
2020 if (TREE_CODE (blck_size) != INTEGER_CST)
2022 values->safe_push (gimple_alloc_histogram_value (cfun,
2023 HIST_TYPE_SINGLE_VALUE,
2024 stmt, blck_size));
2025 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
2026 stmt, blck_size));
2029 if (TREE_CODE (blck_size) != INTEGER_CST)
2030 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
2031 stmt, dest));
2034 /* Find values inside STMT for which we want to measure histograms, and add
2035    them to the list VALUES. */
2037 static void
2038 gimple_values_to_profile (gimple *stmt, histogram_values *values)
2040 gimple_divmod_values_to_profile (stmt, values);
2041 gimple_stringops_values_to_profile (stmt, values);
2042 gimple_indirect_call_to_profile (stmt, values);
2045 void
2046 gimple_find_values_to_profile (histogram_values *values)
2048 basic_block bb;
2049 gimple_stmt_iterator gsi;
2050 unsigned i;
2051 histogram_value hist = NULL;
2052 values->create (0);
2054 FOR_EACH_BB_FN (bb, cfun)
2055 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2056 gimple_values_to_profile (gsi_stmt (gsi), values);
2058 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_TIME_PROFILE, 0, 0));
2060 FOR_EACH_VEC_ELT (*values, i, hist)
2062 switch (hist->type)
2064 case HIST_TYPE_INTERVAL:
2065 hist->n_counters = hist->hdata.intvl.steps + 2;
2066 break;
2068 case HIST_TYPE_POW2:
2069 hist->n_counters = 2;
2070 break;
2072 case HIST_TYPE_SINGLE_VALUE:
2073 hist->n_counters = 3;
2074 break;
2076 case HIST_TYPE_CONST_DELTA:
2077 hist->n_counters = 4;
2078 break;
2080 case HIST_TYPE_INDIR_CALL:
2081 hist->n_counters = 3;
2082 break;
2084 case HIST_TYPE_TIME_PROFILE:
2085 hist->n_counters = 1;
2086 break;
2088 case HIST_TYPE_AVERAGE:
2089 hist->n_counters = 2;
2090 break;
2092 case HIST_TYPE_IOR:
2093 hist->n_counters = 1;
2094 break;
2096 case HIST_TYPE_INDIR_CALL_TOPN:
2097 hist->n_counters = GCOV_ICALL_TOPN_NCOUNTS;
2098 break;
2100 default:
2101 gcc_unreachable ();
2103 if (dump_file)
2105 fprintf (dump_file, "Stmt ");
2106 print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
2107 dump_histogram_value (dump_file, hist);