/* Transformations based on profile information for values.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "value-prof.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-flow-inline.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "pointer-set.h"
static struct value_prof_hooks *value_prof_hooks;
/* In this file value profile based optimizations are placed.  Currently the
   following optimizations are implemented (for more detailed descriptions
   see comments at value_profile_transformations):

   1) Division/modulo specialization.  Provided that we can determine that the
      operands of the division have some special properties, we may use it to
      produce more effective code.

   2) Speculative prefetching.  If we are able to determine that the difference
      between addresses accessed by a memory reference is usually constant, we
      may add the prefetch instructions.
      FIXME: This transformation was removed together with RTL based value
      profiling.

   3) Indirect/virtual call specialization.  If we can determine the most
      common callee of an indirect/virtual call, we can use this information
      to improve code effectiveness (especially info for the inliner).

   Every such optimization should add its requirements for profiled values to
   insn_values_to_profile function.  This function is called from branch_prob
   in profile.c and the requested values are instrumented by it in the first
   compilation with -fprofile-arcs.  The optimization may then read the
   gathered data in the second compilation with -fbranch-probabilities.

   The measured data is pointed to from the histograms
   field of the statement annotation of the instrumented insns.  It is
   kept as a linked list of struct histogram_value_t's, which contain the
   same information as above.  */
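/* Illustrative sketch (added for orientation; not the exact GIMPLE this file
   emits): with the gathered profile data, optimization 1) turns

       r = a % b;

   where the histogram says b is almost always some constant C, into roughly

       if (b == C)
         r = a % C;      <-- divisor is a compile-time constant
       else
         r = a % b;      <-- rare general case

   with edge probabilities taken from the measured counts.  See
   gimple_divmod_fixed_value and gimple_mod_pow2 below for the code that
   actually builds this shape.  */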
static tree gimple_divmod_fixed_value (gimple, tree, int, gcov_type, gcov_type);
static tree gimple_mod_pow2 (gimple, int, gcov_type, gcov_type);
static tree gimple_mod_subtract (gimple, int, int, int, gcov_type, gcov_type,
				 gcov_type);
static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
static bool gimple_stringops_transform (gimple_stmt_iterator *);
static bool gimple_ic_transform (gimple);
/* Allocate histogram value.  */

static histogram_value
gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
			      enum hist_type type, gimple stmt, tree value)
{
  histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
  hist->hvalue.value = value;
  hist->hvalue.stmt = stmt;
  hist->type = type;
  return hist;
}
/* Hash value for histogram.  */

static hashval_t
histogram_hash (const void *x)
{
  return htab_hash_pointer (((const_histogram_value) x)->hvalue.stmt);
}
/* Return nonzero if the histogram_value X is attached to statement Y.  */

static int
histogram_eq (const void *x, const void *y)
{
  return ((const_histogram_value) x)->hvalue.stmt == (const_gimple) y;
}
/* Set histogram for STMT.  */

static void
set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
{
  void **loc;
  if (!hist && !VALUE_HISTOGRAMS (fun))
    return;
  if (!VALUE_HISTOGRAMS (fun))
    VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
					  histogram_eq, NULL);
  loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
				  htab_hash_pointer (stmt),
				  hist ? INSERT : NO_INSERT);
  if (hist)
    *loc = hist;
  else if (loc)
    htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
}
/* Get histogram list for STMT.  */

static histogram_value
gimple_histogram_value (struct function *fun, gimple stmt)
{
  if (!VALUE_HISTOGRAMS (fun))
    return NULL;
  return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
						htab_hash_pointer (stmt));
}
/* Add histogram for STMT.  */

void
gimple_add_histogram_value (struct function *fun, gimple stmt,
			    histogram_value hist)
{
  hist->hvalue.next = gimple_histogram_value (fun, stmt);
  set_histogram_value (fun, stmt, hist);
}
/* Remove histogram HIST from STMT's histogram list.  */

void
gimple_remove_histogram_value (struct function *fun, gimple stmt,
			       histogram_value hist)
{
  histogram_value hist2 = gimple_histogram_value (fun, stmt);
  if (hist == hist2)
    {
      set_histogram_value (fun, stmt, hist->hvalue.next);
    }
  else
    {
      while (hist2->hvalue.next != hist)
	hist2 = hist2->hvalue.next;
      hist2->hvalue.next = hist->hvalue.next;
    }
  free (hist->hvalue.counters);
#ifdef ENABLE_CHECKING
  memset (hist, 0xab, sizeof (*hist));
#endif
  free (hist);
}
/* Lookup histogram of type TYPE in the STMT.  */

histogram_value
gimple_histogram_value_of_type (struct function *fun, gimple stmt,
				enum hist_type type)
{
  histogram_value hist;
  for (hist = gimple_histogram_value (fun, stmt); hist;
       hist = hist->hvalue.next)
    if (hist->type == type)
      return hist;
  return NULL;
}
/* Dump information about HIST to DUMP_FILE.  */

static void
dump_histogram_value (FILE *dump_file, histogram_value hist)
{
  switch (hist->type)
    {
    case HIST_TYPE_INTERVAL:
      fprintf (dump_file, "Interval counter range %d -- %d",
	       hist->hdata.intvl.int_start,
	       (hist->hdata.intvl.int_start
		+ hist->hdata.intvl.steps - 1));
      if (hist->hvalue.counters)
	{
	  unsigned int i;
	  fprintf (dump_file, " [");
	  for (i = 0; i < hist->hdata.intvl.steps; i++)
	    fprintf (dump_file, " %d:"HOST_WIDEST_INT_PRINT_DEC,
		     hist->hdata.intvl.int_start + i,
		     (HOST_WIDEST_INT) hist->hvalue.counters[i]);
	  fprintf (dump_file, " ] outside range:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[i]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_POW2:
      fprintf (dump_file, "Pow2 counter ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "pow2:"HOST_WIDEST_INT_PRINT_DEC
		 " nonpow2:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0],
		 (HOST_WIDEST_INT) hist->hvalue.counters[1]);
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_SINGLE_VALUE:
      fprintf (dump_file, "Single value ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		 " match:"HOST_WIDEST_INT_PRINT_DEC
		 " wrong:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0],
		 (HOST_WIDEST_INT) hist->hvalue.counters[1],
		 (HOST_WIDEST_INT) hist->hvalue.counters[2]);
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_AVERAGE:
      fprintf (dump_file, "Average value ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "sum:"HOST_WIDEST_INT_PRINT_DEC
		 " times:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0],
		 (HOST_WIDEST_INT) hist->hvalue.counters[1]);
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_IOR:
      fprintf (dump_file, "IOR value ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "ior:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0]);
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_CONST_DELTA:
      fprintf (dump_file, "Constant delta ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		 " match:"HOST_WIDEST_INT_PRINT_DEC
		 " wrong:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0],
		 (HOST_WIDEST_INT) hist->hvalue.counters[1],
		 (HOST_WIDEST_INT) hist->hvalue.counters[2]);
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_INDIR_CALL:
      fprintf (dump_file, "Indirect call ");
      if (hist->hvalue.counters)
	fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		 " match:"HOST_WIDEST_INT_PRINT_DEC
		 " all:"HOST_WIDEST_INT_PRINT_DEC,
		 (HOST_WIDEST_INT) hist->hvalue.counters[0],
		 (HOST_WIDEST_INT) hist->hvalue.counters[1],
		 (HOST_WIDEST_INT) hist->hvalue.counters[2]);
      fprintf (dump_file, ".\n");
      break;
    }
}
/* Dump all histograms attached to STMT to DUMP_FILE.  */

void
dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
{
  histogram_value hist;
  for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
    dump_histogram_value (dump_file, hist);
}
/* Remove all histograms associated with STMT.  */

void
gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
{
  histogram_value val;
  while ((val = gimple_histogram_value (fun, stmt)) != NULL)
    gimple_remove_histogram_value (fun, stmt, val);
}
/* Duplicate all histograms associated with OSTMT to STMT.  */

void
gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
				  struct function *ofun, gimple ostmt)
{
  histogram_value val;
  for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
    {
      histogram_value new_val = gimple_alloc_histogram_value (fun, val->type, NULL, NULL);
      memcpy (new_val, val, sizeof (*val));
      new_val->hvalue.stmt = stmt;
      new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
      memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
      gimple_add_histogram_value (fun, stmt, new_val);
    }
}
/* Move all histograms associated with OSTMT to STMT.  */

void
gimple_move_stmt_histograms (struct function *fun, gimple stmt, gimple ostmt)
{
  histogram_value val = gimple_histogram_value (fun, ostmt);
  if (val)
    {
      /* The following three statements can't be reordered,
	 because histogram hashtab relies on stmt field value
	 for finding the exact slot.  */
      set_histogram_value (fun, ostmt, NULL);
      for (; val != NULL; val = val->hvalue.next)
	val->hvalue.stmt = stmt;
      set_histogram_value (fun, stmt, val);
    }
}
static bool error_found = false;

/* Helper function for verify_histograms.  For each histogram reachable via
   htab walk verify that it was reached via statement walk.  */

static int
visit_hist (void **slot, void *data)
{
  struct pointer_set_t *visited = (struct pointer_set_t *) data;
  histogram_value hist = *(histogram_value *) slot;
  if (!pointer_set_contains (visited, hist))
    {
      error ("Dead histogram");
      dump_histogram_value (stderr, hist);
      debug_gimple_stmt (hist->hvalue.stmt);
      error_found = true;
    }
  return 1;
}
/* Verify sanity of the histograms.  */

void
verify_histograms (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  histogram_value hist;
  struct pointer_set_t *visited_hists;

  error_found = false;
  visited_hists = pointer_set_create ();
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);

	for (hist = gimple_histogram_value (cfun, stmt); hist;
	     hist = hist->hvalue.next)
	  {
	    if (hist->hvalue.stmt != stmt)
	      {
		error ("Histogram value statement does not correspond to "
		       "the statement it is associated with");
		debug_gimple_stmt (stmt);
		dump_histogram_value (stderr, hist);
		error_found = true;
	      }
	    pointer_set_insert (visited_hists, hist);
	  }
      }
  if (VALUE_HISTOGRAMS (cfun))
    htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, visited_hists);
  pointer_set_destroy (visited_hists);
  if (error_found)
    internal_error ("verify_histograms failed");
}
/* Helper function for free_histograms.  Free the histogram reachable via
   htab SLOT.  */

static int
free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
{
  histogram_value hist = *(histogram_value *) slot;
  free (hist->hvalue.counters);
#ifdef ENABLE_CHECKING
  memset (hist, 0xab, sizeof (*hist));
#endif
  free (hist);
  return 1;
}
/* Free all histograms of the current function.  */

void
free_histograms (void)
{
  if (VALUE_HISTOGRAMS (cfun))
    {
      htab_traverse (VALUE_HISTOGRAMS (cfun), free_hist, NULL);
      htab_delete (VALUE_HISTOGRAMS (cfun));
      VALUE_HISTOGRAMS (cfun) = NULL;
    }
}
/* The overall number of invocations of the counter should match
   execution count of basic block.  Report it as error rather than
   internal error as it might mean that user has misused the profile
   somehow.  */
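/* Usage sketch (mirrors the calls made by the transformations below): a
   transformation typically does

       if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
         return false;

   i.e. it gives up when the histogram totals cannot be reconciled with the
   basic block count; with -fprofile-correction the counters are clamped
   instead and the transformation may still proceed.  */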
static bool
check_counter (gimple stmt, const char * name,
	       gcov_type *count, gcov_type *all, gcov_type bb_count)
{
  if (*all != bb_count || *count > *all)
    {
      location_t locus;
      locus = (stmt != NULL)
	      ? gimple_location (stmt)
	      : DECL_SOURCE_LOCATION (current_function_decl);
      if (flag_profile_correction)
	{
	  inform (locus, "Correcting inconsistent value profile: "
		  "%s profiler overall count (%d) does not match BB count "
		  "(%d)", name, (int)*all, (int)bb_count);
	  *all = bb_count;
	  if (*count > *all)
	    *count = *all;
	  return false;
	}
      else
	{
	  error ("%HCorrupted value profile: %s profiler overall count (%d) "
		 "does not match BB count (%d)", &locus, name, (int)*all,
		 (int)bb_count);
	  return true;
	}
    }

  return false;
}
/* GIMPLE based transformations.  */

static bool
gimple_value_profile_transformations (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool changed = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  histogram_value th = gimple_histogram_value (cfun, stmt);
	  if (!th)
	    continue;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Trying transformations on stmt ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      dump_histograms_for_stmt (cfun, dump_file, stmt);
	    }

	  /* Transformations:  */
	  /* The order of things in this conditional controls which
	     transformation is used when more than one is applicable.  */
	  /* It is expected that any code added by the transformations
	     will be added before the current statement, and that the
	     current statement remain valid (although possibly
	     modified) upon return.  */
	  if (flag_value_profile_transformations
	      && (gimple_mod_subtract_transform (&gsi)
		  || gimple_divmod_fixed_value_transform (&gsi)
		  || gimple_mod_pow2_value_transform (&gsi)
		  || gimple_stringops_transform (&gsi)
		  || gimple_ic_transform (stmt)))
	    {
	      stmt = gsi_stmt (gsi);
	      changed = true;
	      /* Original statement may no longer be in the same block.  */
	      if (bb != gimple_bb (stmt))
		{
		  bb = gimple_bb (stmt);
		  gsi = gsi_for_stmt (stmt);
		}
	    }
	}
    }

  return changed;
}
/* Generate code for transformation 1 (with parent gimple assignment
   STMT and probability of taking the optimal path PROB, which is
   equivalent to COUNT/ALL within roundoff error).  This generates the
   result into a temp and returns the temp; it does not replace or
   alter the original STMT.  */
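/* Rough shape of the code built below (illustrative only):

       if (op2 != VALUE)
         tmp2 = op1 <op> op2;        <-- general case, expected to be cold
       else
         tmp2 = op1 <op> VALUE;      <-- divisor is the profiled constant

   where <op> is the original TRUNC_DIV_EXPR or TRUNC_MOD_EXPR; the caller
   then rewrites STMT to use the returned temporary.  */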
static tree
gimple_divmod_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
			   gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp1, tmp2, tmpv;
  gimple bb1end, bb2end, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;

  gcc_assert (is_gimple_assign (stmt)
	      && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
		  || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  tmpv = create_tmp_var (optype, "PROF");
  tmp1 = create_tmp_var (optype, "PROF");
  stmt1 = gimple_build_assign (tmpv, fold_convert (optype, value));
  stmt2 = gimple_build_assign (tmp1, op2);
  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  tmp2 = create_tmp_var (optype, "PROF");
  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
					op1, tmpv);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb2end = stmt1;

  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
					op1, op2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG.  */
  /* Edge e23 connects bb2 to bb3, etc.  */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;

  return tmp2;
}
/* Do transform 1) on INSN if applicable.  */

static bool
gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type val, count, all;
  tree result, value, tree_val;
  gcov_type prob;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt,
					      HIST_TYPE_SINGLE_VALUE);
  if (!histogram)
    return false;

  value = histogram->hvalue.value;
  val = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];
  all = histogram->hvalue.counters[2];
  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* We require that count is at least half of all; this means
     that for the transformation to fire the value must be constant
     at least 50% of time (and 75% gives the guarantee of usage).  */
  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
      || 2 * count < all
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;

  /* Compute probability of taking the optimal path.  */
  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;

  tree_val = build_int_cst_wide (get_gcov_type (),
				 (unsigned HOST_WIDE_INT) val,
				 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
  result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);

  if (dump_file)
    {
      fprintf (dump_file, "Div/mod by constant ");
      print_generic_expr (dump_file, value, TDF_SLIM);
      fprintf (dump_file, "=");
      print_generic_expr (dump_file, tree_val, TDF_SLIM);
      fprintf (dump_file, " transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  gimple_assign_set_rhs_from_tree (si, result);

  return true;
}
/* Generate code for transformation 2 (with parent gimple assign STMT and
   probability of taking the optimal path PROB, which is equivalent to
   COUNT/ALL within roundoff error).  This generates the result into a temp
   and returns the temp; it does not replace or alter the original STMT.  */
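/* Rough shape of the code built below (illustrative only):

       if ((op2 & (op2 - 1)) != 0)
         result = op1 % op2;             <-- op2 is not a power of two
       else
         result = op1 & (op2 - 1);       <-- power of two: mask instead of modulo
 */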
static tree
gimple_mod_pow2 (gimple stmt, int prob, gcov_type count, gcov_type all)
{
  gimple stmt1, stmt2, stmt3, stmt4;
  tree tmp2, tmp3;
  gimple bb1end, bb2end, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;
  tree result;

  gcc_assert (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  result = create_tmp_var (optype, "PROF");
  tmp2 = create_tmp_var (optype, "PROF");
  tmp3 = create_tmp_var (optype, "PROF");
  stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, tmp2, op2,
					build_int_cst (optype, -1));
  stmt3 = gimple_build_assign_with_ops (BIT_AND_EXPR, tmp3, tmp2, op2);
  stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
			     NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
  bb1end = stmt4;

  /* tmp2 == op2-1 inherited from previous block.  */
  stmt1 = gimple_build_assign_with_ops (BIT_AND_EXPR, result, op1, tmp2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb2end = stmt1;

  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
					op1, op2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG.  */
  /* Edge e23 connects bb2 to bb3, etc.  */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;

  return result;
}
/* Do transform 2) on INSN if applicable.  */

static bool
gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type count, wrong_values, all;
  tree lhs_type, result, value;
  gcov_type prob;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
  if (!INTEGRAL_TYPE_P (lhs_type))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
  if (!histogram)
    return false;

  value = histogram->hvalue.value;
  wrong_values = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];

  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* We require that we hit a power of 2 at least half of all evaluations.  */
  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
      || count < wrong_values
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  if (dump_file)
    {
      fprintf (dump_file, "Mod power of 2 transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Compute probability of taking the optimal path.  */
  all = count + wrong_values;

  if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
    return false;

  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;

  result = gimple_mod_pow2 (stmt, prob, count, all);

  gimple_assign_set_rhs_from_tree (si, result);

  return true;
}
/* Generate code for transformations 3 and 4 (with parent gimple assign STMT,
   and NCOUNTS the number of cases to support.  Currently only NCOUNTS==0 or 1
   is supported and this is built into this interface.  The probabilities of
   taking the optimal paths are PROB1 and PROB2, which are equivalent to
   COUNT1/ALL and COUNT2/ALL respectively within roundoff error).  This
   generates the result into a temp and returns the temp; it does not replace
   or alter the original STMT.  */
/* FIXME: Generalize the interface to handle NCOUNTS > 1.  */
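/* Rough shape of the generated code for NCOUNTS == 1 (illustrative only):

       result = op1;
       if (result >= op2)                <-- otherwise the modulo is a no-op
         {
           result = result - op2;
           if (result >= op2)            <-- a single subtraction did not suffice
             result = result % op2;
         }
 */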
static tree
gimple_mod_subtract (gimple stmt, int prob1, int prob2, int ncounts,
		     gcov_type count1, gcov_type count2, gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp1;
  gimple bb1end, bb2end = NULL, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e23 = 0, e24, e34, e14;
  gimple_stmt_iterator gsi;
  tree result;

  gcc_assert (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  result = create_tmp_var (optype, "PROF");
  tmp1 = create_tmp_var (optype, "PROF");
  stmt1 = gimple_build_assign (result, op1);
  stmt2 = gimple_build_assign (tmp1, op2);
  stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  if (ncounts)	/* Assumed to be 0 or 1 */
    {
      stmt1 = gimple_build_assign_with_ops (MINUS_EXPR, result, result, tmp1);
      stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
      gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
      gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
      bb2end = stmt2;
    }

  /* Fallback case.  */
  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
					result, tmp1);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG.  */
  /* Edge e23 connects bb2 to bb3, etc.  */
  /* However block 3 is optional; if it is not there, references
     to 3 really refer to block 2.  */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = all - count1;

  if (ncounts)	/* Assumed to be 0 or 1.  */
    {
      e23 = split_block (bb2, bb2end);
      bb3 = e23->dest;
      bb3->count = all - count1 - count2;
    }

  e34 = split_block (ncounts ? bb3 : bb2, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = REG_BR_PROB_BASE - prob1;
  e12->count = all - count1;

  e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
  e14->probability = prob1;
  e14->count = count1;

  if (ncounts)	/* Assumed to be 0 or 1.  */
    {
      e23->flags &= ~EDGE_FALLTHRU;
      e23->flags |= EDGE_FALSE_VALUE;
      e23->count = all - count1 - count2;
      e23->probability = REG_BR_PROB_BASE - prob2;

      e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
      e24->probability = prob2;
      e24->count = count2;
    }

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count1 - count2;

  return result;
}
/* Do transforms 3) and 4) on the statement pointed-to by SI if applicable.  */

static bool
gimple_mod_subtract_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type count, wrong_values, all;
  tree lhs_type, result, value;
  gcov_type prob1, prob2;
  unsigned int i, steps;
  gcov_type count1, count2;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
  if (!INTEGRAL_TYPE_P (lhs_type))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
  if (!histogram)
    return false;

  value = histogram->hvalue.value;
  all = 0;
  wrong_values = 0;
  for (i = 0; i < histogram->hdata.intvl.steps; i++)
    all += histogram->hvalue.counters[i];

  wrong_values += histogram->hvalue.counters[i];
  wrong_values += histogram->hvalue.counters[i+1];
  steps = histogram->hdata.intvl.steps;
  all += wrong_values;
  count1 = histogram->hvalue.counters[0];
  count2 = histogram->hvalue.counters[1];

  /* Compute probability of taking the optimal path.  */
  if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      return false;
    }

  if (flag_profile_correction && count1 + count2 > all)
    all = count1 + count2;

  gcc_assert (count1 + count2 <= all);

  /* We require that we use just subtractions in at least 50% of all
     evaluations.  */
  count = 0;
  for (i = 0; i < histogram->hdata.intvl.steps; i++)
    {
      count += histogram->hvalue.counters[i];
      if (count * 2 >= all)
	break;
    }
  if (i == steps
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  gimple_remove_histogram_value (cfun, stmt, histogram);
  if (dump_file)
    {
      fprintf (dump_file, "Mod subtract transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Compute probability of taking the optimal path(s).  */
  if (all > 0)
    {
      prob1 = (count1 * REG_BR_PROB_BASE + all / 2) / all;
      prob2 = (count2 * REG_BR_PROB_BASE + all / 2) / all;
    }
  else
    {
      prob1 = prob2 = 0;
    }

  /* In practice, "steps" is always 2.  This interface reflects this,
     and will need to be changed if "steps" can change.  */
  result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);

  gimple_assign_set_rhs_from_tree (si, result);

  return true;
}
static struct cgraph_node** pid_map = NULL;

/* Initialize map of pids (pid -> cgraph node)  */

static void
init_pid_map (void)
{
  struct cgraph_node *n;

  if (pid_map != NULL)
    return;

  pid_map
    = (struct cgraph_node**) xmalloc (sizeof (struct cgraph_node*) * cgraph_max_pid);

  for (n = cgraph_nodes; n; n = n->next)
    {
      if (n->pid != -1)
	pid_map [n->pid] = n;
    }
}

/* Return cgraph node for function with pid  */

static inline struct cgraph_node*
find_func_by_pid (int pid)
{
  init_pid_map ();

  return pid_map [pid];
}
/* Do transformation

  if (actual_callee_address == address_of_most_common_function/method)
    do direct call
  else
    old call
 */
static gimple
gimple_ic (gimple stmt, gimple call, struct cgraph_node *direct_call,
	   int prob, gcov_type count, gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp1, tmpv, tmp;
  gimple bb1end, bb2end, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype = build_pointer_type (void_type_node);
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;
  int region;

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  tmpv = create_tmp_var (optype, "PROF");
  tmp1 = create_tmp_var (optype, "PROF");
  stmt1 = gimple_build_assign (tmpv, unshare_expr (gimple_call_fn (call)));

  tmp = fold_convert (optype, build_addr (direct_call->decl,
					  current_function_decl));
  stmt2 = gimple_build_assign (tmp1, tmp);
  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  stmt1 = gimple_copy (stmt);
  gimple_call_set_fndecl (stmt1, direct_call->decl);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb2end = stmt1;
  bb3end = stmt;

  /* Fix CFG.  */
  /* Edge e23 connects bb2 to bb3, etc.  */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;

  /* Fix eh edges.  */
  region = lookup_stmt_eh_region (stmt);
  if (region >= 0 && stmt_could_throw_p (stmt1))
    {
      add_stmt_to_eh_region (stmt1, region);
      make_eh_edges (stmt1);
    }

  if (region >= 0 && stmt_could_throw_p (stmt))
    {
      gimple_purge_dead_eh_edges (bb4);
      make_eh_edges (stmt);
    }

  return stmt1;
}
/*
  For every checked indirect/virtual call determine if the most common pid of
  the called function/class method accounts for more than 75% of all calls
  (see the 4 * count <= 3 * all test below).  If yes, rewrite the call to
  target that function directly, guarded by an address comparison.
 */
static bool
gimple_ic_transform (gimple stmt)
{
  histogram_value histogram;
  gcov_type val, count, all, bb_all;
  gcov_type prob;
  tree callee;
  gimple modify;
  struct cgraph_node *direct_call;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  callee = gimple_call_fn (stmt);

  if (TREE_CODE (callee) == FUNCTION_DECL)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
  if (!histogram)
    return false;

  val = histogram->hvalue.counters [0];
  count = histogram->hvalue.counters [1];
  all = histogram->hvalue.counters [2];
  gimple_remove_histogram_value (cfun, stmt, histogram);

  if (4 * count <= 3 * all)
    return false;

  bb_all = gimple_bb (stmt)->count;
  /* The order of CHECK_COUNTER calls is important -
     since check_counter can correct the third parameter
     and we want to make count <= all <= bb_all.  */
  if ( check_counter (stmt, "ic", &all, &bb_all, bb_all)
      || check_counter (stmt, "ic", &count, &all, all))
    return false;

  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;
  direct_call = find_func_by_pid ((int)val);

  if (direct_call == NULL)
    return false;

  modify = gimple_ic (stmt, stmt, direct_call, prob, count, all);

  if (dump_file)
    {
      fprintf (dump_file, "Indirect call -> direct call ");
      print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
      fprintf (dump_file, "=> ");
      print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
      fprintf (dump_file, " transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, " to ");
      print_gimple_stmt (dump_file, modify, 0, TDF_SLIM);
      fprintf (dump_file, "hist->count "HOST_WIDEST_INT_PRINT_DEC
	       " hist->all "HOST_WIDEST_INT_PRINT_DEC"\n", count, all);
    }

  return true;
}
/* Return true if the stringop CALL with FNDECL shall be profiled.  */

static bool
interesting_stringop_to_profile_p (tree fndecl, gimple call)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode != BUILT_IN_MEMCPY && fcode != BUILT_IN_MEMPCPY
      && fcode != BUILT_IN_MEMSET && fcode != BUILT_IN_BZERO)
    return false;

  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
      return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				      INTEGER_TYPE, VOID_TYPE);
    case BUILT_IN_MEMSET:
      return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				      INTEGER_TYPE, VOID_TYPE);
    case BUILT_IN_BZERO:
      return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				      VOID_TYPE);
    default:
      gcc_unreachable ();
    }
}
/* Convert   stringop (..., size)
   into
   if (size == VALUE)
     stringop (..., VALUE);
   else
     stringop (..., size);
   assuming constant propagation of VALUE will happen later.
*/
static void
gimple_stringop_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
			     gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp1, tmpv;
  gimple bb1end, bb2end;
  basic_block bb, bb2, bb3, bb4;
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;
  tree blck_size = gimple_call_arg (stmt, 2);
  tree optype = TREE_TYPE (blck_size);
  int region;

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e34 = ei_safe_edge (ei)); )
	if (!(e34->flags & EDGE_ABNORMAL))
	  break;
    }
  else
    {
      e34 = split_block (bb, stmt);
      gsi = gsi_for_stmt (stmt);
    }
  bb4 = e34->dest;

  tmpv = create_tmp_var (optype, "PROF");
  tmp1 = create_tmp_var (optype, "PROF");
  stmt1 = gimple_build_assign (tmpv, fold_convert (optype, value));
  stmt2 = gimple_build_assign (tmp1, blck_size);
  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  stmt1 = gimple_copy (stmt);
  gimple_call_set_arg (stmt1, 2, value);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  region = lookup_stmt_eh_region (stmt);
  if (region >= 0)
    add_stmt_to_eh_region (stmt1, region);
  bb2end = stmt1;

  /* Fix CFG.  */
  /* Edge e23 connects bb2 to bb3, etc.  */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;
}
/* Specialize the string operation call in the statement pointed to by GSI
   for the most common block size, if the profile says this is worthwhile.  */
static bool
gimple_stringops_transform (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree fndecl;
  tree blck_size;
  enum built_in_function fcode;
  histogram_value histogram;
  gcov_type count, all, val;
  tree value;
  tree dest, src;
  unsigned int dest_align, src_align;
  gcov_type prob;
  tree tree_val;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return false;
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (!interesting_stringop_to_profile_p (fndecl, stmt))
    return false;

  if (fcode == BUILT_IN_BZERO)
    blck_size = gimple_call_arg (stmt, 1);
  else
    blck_size = gimple_call_arg (stmt, 2);
  if (TREE_CODE (blck_size) == INTEGER_CST)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
  if (!histogram)
    return false;
  value = histogram->hvalue.value;
  val = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];
  all = histogram->hvalue.counters[2];
  gimple_remove_histogram_value (cfun, stmt, histogram);
  /* We require that COUNT is more than 5/6 of ALL; this means that for the
     transformation to fire the block size must be constant at least ~83%
     of the time.  */
  if ((6 * count / 5) < all || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;
  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;
  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;
  dest = gimple_call_arg (stmt, 0);
  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
      src = gimple_call_arg (stmt, 1);
      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
	return false;
      break;
    case BUILT_IN_MEMSET:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				gimple_call_arg (stmt, 1),
				dest_align, true))
	return false;
      break;
    case BUILT_IN_BZERO:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				integer_zero_node,
				dest_align, true))
	return false;
      break;
    default:
      gcc_unreachable ();
    }

  tree_val = build_int_cst_wide (get_gcov_type (),
				 (unsigned HOST_WIDE_INT) val,
				 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
  if (dump_file)
    {
      fprintf (dump_file, "Single value %i stringop transformation on ",
	       (int)val);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }
  gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);

  return true;
}
void
stringop_block_profile (gimple stmt, unsigned int *expected_align,
			HOST_WIDE_INT *expected_size)
{
  histogram_value histogram;
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
  if (!histogram)
    *expected_size = -1;
  else if (!histogram->hvalue.counters[1])
    {
      *expected_size = -1;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  else
    {
      gcov_type size;
      size = ((histogram->hvalue.counters[0]
	       + histogram->hvalue.counters[1] / 2)
	      / histogram->hvalue.counters[1]);
      /* Even if we can hold bigger value in SIZE, INT_MAX
	 is safe "infinity" for code generation strategies.  */
      if (size > INT_MAX)
	size = INT_MAX;
      *expected_size = size;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
  if (!histogram)
    *expected_align = 0;
  else if (!histogram->hvalue.counters[0])
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      *expected_align = 0;
    }
  else
    {
      gcov_type count;
      int alignment;

      count = histogram->hvalue.counters[0];
      alignment = 1;
      while (!(count & alignment)
	     && (alignment * 2 * BITS_PER_UNIT))
	alignment <<= 1;
      *expected_align = alignment * BITS_PER_UNIT;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
}
struct value_prof_hooks {
  /* Find list of values for which we want to measure histograms.  */
  void (*find_values_to_profile) (histogram_values *);

  /* Identify and exploit properties of values that are hard to analyze
     statically.  See value-prof.c for more detail.  */
  bool (*value_profile_transformations) (void);
};
/* Find values inside STMT for which we want to measure histograms for
   division/modulo optimization.  */

static void
gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
{
  tree lhs, divisor, op0, type;
  histogram_value hist;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return;

  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (type))
    return;

  switch (gimple_assign_rhs_code (stmt))
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      divisor = gimple_assign_rhs2 (stmt);
      op0 = gimple_assign_rhs1 (stmt);

      VEC_reserve (histogram_value, heap, *values, 3);

      if (is_gimple_reg (divisor))
	/* Check for the case where the divisor is the same value most
	   of the time.  */
	VEC_quick_push (histogram_value, *values,
			gimple_alloc_histogram_value (cfun,
						      HIST_TYPE_SINGLE_VALUE,
						      stmt, divisor));

      /* For mod, check whether it is not often a noop (or replaceable by
	 a few subtractions).  */
      if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type))
	{
	  tree val;
	  /* Check for a special case where the divisor is power of 2.  */
	  VEC_quick_push (histogram_value, *values,
			  gimple_alloc_histogram_value (cfun, HIST_TYPE_POW2,
							stmt, divisor));

	  val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
	  hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
					       stmt, val);
	  hist->hdata.intvl.int_start = 0;
	  hist->hdata.intvl.steps = 2;
	  VEC_quick_push (histogram_value, *values, hist);
	}
      return;

    default:
      return;
    }
}
/* Find calls inside STMT for which we want to measure histograms for
   indirect/virtual call optimization.  */

static void
gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
{
  tree callee;

  if (gimple_code (stmt) != GIMPLE_CALL
      || gimple_call_fndecl (stmt) != NULL_TREE)
    return;

  callee = gimple_call_fn (stmt);

  VEC_reserve (histogram_value, heap, *values, 3);

  VEC_quick_push (histogram_value, *values,
		  gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
						stmt, callee));
}
/* Find values inside STMT for which we want to measure histograms for
   string operations.  */

static void
gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
{
  tree fndecl;
  tree blck_size;
  tree dest;
  enum built_in_function fcode;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return;
  fcode = DECL_FUNCTION_CODE (fndecl);

  if (!interesting_stringop_to_profile_p (fndecl, stmt))
    return;

  dest = gimple_call_arg (stmt, 0);
  if (fcode == BUILT_IN_BZERO)
    blck_size = gimple_call_arg (stmt, 1);
  else
    blck_size = gimple_call_arg (stmt, 2);

  if (TREE_CODE (blck_size) != INTEGER_CST)
    {
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
						   stmt, blck_size));
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
						   stmt, blck_size));
    }
  if (TREE_CODE (blck_size) != INTEGER_CST)
    VEC_safe_push (histogram_value, heap, *values,
		   gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
						 stmt, dest));
}
/* Find values inside STMT for which we want to measure histograms and add
   them to the list VALUES.  */

static void
gimple_values_to_profile (gimple stmt, histogram_values *values)
{
  if (flag_value_profile_transformations)
    {
      gimple_divmod_values_to_profile (stmt, values);
      gimple_stringops_values_to_profile (stmt, values);
      gimple_indirect_call_to_profile (stmt, values);
    }
}
static void
gimple_find_values_to_profile (histogram_values *values)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned i;
  histogram_value hist = NULL;

  *values = NULL;
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      gimple_values_to_profile (gsi_stmt (gsi), values);

  for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
    {
      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  hist->n_counters = hist->hdata.intvl.steps + 2;
	  break;

	case HIST_TYPE_POW2:
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  hist->n_counters = 4;
	  break;

	case HIST_TYPE_INDIR_CALL:
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_AVERAGE:
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_IOR:
	  hist->n_counters = 1;
	  break;

	default:
	  gcc_unreachable ();
	}
      if (dump_file)
	{
	  fprintf (dump_file, "Stmt ");
	  print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
	  dump_histogram_value (dump_file, hist);
	}
    }
}
static struct value_prof_hooks gimple_value_prof_hooks = {
  gimple_find_values_to_profile,
  gimple_value_profile_transformations
};

void
gimple_register_value_prof_hooks (void)
{
  gcc_assert (current_ir_type () == IR_GIMPLE);
  value_prof_hooks = &gimple_value_prof_hooks;
}
/* IR-independent entry points.  */

void
find_values_to_profile (histogram_values *values)
{
  (value_prof_hooks->find_values_to_profile) (values);
}

bool
value_profile_transformations (void)
{
  return (value_prof_hooks->value_profile_transformations) ();
}