1 /* Transformations based on profile information for values.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "value-prof.h"
31 #include "insn-config.h"
36 #include "tree-flow.h"
37 #include "tree-flow-inline.h"
38 #include "diagnostic.h"
39 #include "gimple-pretty-print.h"
46 #include "pointer-set.h"
49 /* In this file value profile based optimizations are placed. Currently the
50 following optimizations are implemented (for more detailed descriptions
51 see comments at value_profile_transformations):
53 1) Division/modulo specialization. Provided that we can determine that the
54 operands of the division have some special properties, we may use it to
55 produce more effective code.
56 2) Speculative prefetching. If we are able to determine that the difference
57 between addresses accessed by a memory reference is usually constant, we
58 may add the prefetch instructions.
59 FIXME: This transformation was removed together with RTL based value
62 3) Indirect/virtual call specialization. If we can determine most
63 common function callee in indirect/virtual call. We can use this
64 information to improve code effectiveness (especially info for
67 Every such optimization should add its requirements for profiled values to
68 insn_values_to_profile function. This function is called from branch_prob
69 in profile.c and the requested values are instrumented by it in the first
70 compilation with -fprofile-arcs. The optimization may then read the
71 gathered data in the second compilation with -fbranch-probabilities.
73 The measured data is pointed to from the histograms
74 field of the statement annotation of the instrumented insns. It is
75 kept as a linked list of struct histogram_value_t's, which contain the
76 same information as above. */
79 static tree
gimple_divmod_fixed_value (gimple
, tree
, int, gcov_type
, gcov_type
);
80 static tree
gimple_mod_pow2 (gimple
, int, gcov_type
, gcov_type
);
81 static tree
gimple_mod_subtract (gimple
, int, int, int, gcov_type
, gcov_type
,
83 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator
*);
84 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator
*);
85 static bool gimple_mod_subtract_transform (gimple_stmt_iterator
*);
86 static bool gimple_stringops_transform (gimple_stmt_iterator
*);
87 static bool gimple_ic_transform (gimple
);
89 /* Allocate histogram value. */
91 static histogram_value
92 gimple_alloc_histogram_value (struct function
*fun ATTRIBUTE_UNUSED
,
93 enum hist_type type
, gimple stmt
, tree value
)
95 histogram_value hist
= (histogram_value
) xcalloc (1, sizeof (*hist
));
96 hist
->hvalue
.value
= value
;
97 hist
->hvalue
.stmt
= stmt
;
102 /* Hash value for histogram. */
105 histogram_hash (const void *x
)
107 return htab_hash_pointer (((const_histogram_value
)x
)->hvalue
.stmt
);
110 /* Return nonzero if statement for histogram_value X is Y. */
113 histogram_eq (const void *x
, const void *y
)
115 return ((const_histogram_value
) x
)->hvalue
.stmt
== (const_gimple
) y
;
118 /* Set histogram for STMT. */
121 set_histogram_value (struct function
*fun
, gimple stmt
, histogram_value hist
)
124 if (!hist
&& !VALUE_HISTOGRAMS (fun
))
126 if (!VALUE_HISTOGRAMS (fun
))
127 VALUE_HISTOGRAMS (fun
) = htab_create (1, histogram_hash
,
129 loc
= htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun
), stmt
,
130 htab_hash_pointer (stmt
),
131 hist
? INSERT
: NO_INSERT
);
135 htab_clear_slot (VALUE_HISTOGRAMS (fun
), loc
);
141 /* Get histogram list for STMT. */
144 gimple_histogram_value (struct function
*fun
, gimple stmt
)
146 if (!VALUE_HISTOGRAMS (fun
))
148 return (histogram_value
) htab_find_with_hash (VALUE_HISTOGRAMS (fun
), stmt
,
149 htab_hash_pointer (stmt
));
152 /* Add histogram for STMT. */
155 gimple_add_histogram_value (struct function
*fun
, gimple stmt
,
156 histogram_value hist
)
158 hist
->hvalue
.next
= gimple_histogram_value (fun
, stmt
);
159 set_histogram_value (fun
, stmt
, hist
);
163 /* Remove histogram HIST from STMT's histogram list. */
166 gimple_remove_histogram_value (struct function
*fun
, gimple stmt
,
167 histogram_value hist
)
169 histogram_value hist2
= gimple_histogram_value (fun
, stmt
);
172 set_histogram_value (fun
, stmt
, hist
->hvalue
.next
);
176 while (hist2
->hvalue
.next
!= hist
)
177 hist2
= hist2
->hvalue
.next
;
178 hist2
->hvalue
.next
= hist
->hvalue
.next
;
180 free (hist
->hvalue
.counters
);
181 #ifdef ENABLE_CHECKING
182 memset (hist
, 0xab, sizeof (*hist
));
188 /* Lookup histogram of type TYPE in the STMT. */
191 gimple_histogram_value_of_type (struct function
*fun
, gimple stmt
,
194 histogram_value hist
;
195 for (hist
= gimple_histogram_value (fun
, stmt
); hist
;
196 hist
= hist
->hvalue
.next
)
197 if (hist
->type
== type
)
202 /* Dump information about HIST to DUMP_FILE. */
205 dump_histogram_value (FILE *dump_file
, histogram_value hist
)
209 case HIST_TYPE_INTERVAL
:
210 fprintf (dump_file
, "Interval counter range %d -- %d",
211 hist
->hdata
.intvl
.int_start
,
212 (hist
->hdata
.intvl
.int_start
213 + hist
->hdata
.intvl
.steps
- 1));
214 if (hist
->hvalue
.counters
)
217 fprintf(dump_file
, " [");
218 for (i
= 0; i
< hist
->hdata
.intvl
.steps
; i
++)
219 fprintf (dump_file
, " %d:"HOST_WIDEST_INT_PRINT_DEC
,
220 hist
->hdata
.intvl
.int_start
+ i
,
221 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[i
]);
222 fprintf (dump_file
, " ] outside range:"HOST_WIDEST_INT_PRINT_DEC
,
223 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[i
]);
225 fprintf (dump_file
, ".\n");
229 fprintf (dump_file
, "Pow2 counter ");
230 if (hist
->hvalue
.counters
)
232 fprintf (dump_file
, "pow2:"HOST_WIDEST_INT_PRINT_DEC
233 " nonpow2:"HOST_WIDEST_INT_PRINT_DEC
,
234 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
235 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1]);
237 fprintf (dump_file
, ".\n");
240 case HIST_TYPE_SINGLE_VALUE
:
241 fprintf (dump_file
, "Single value ");
242 if (hist
->hvalue
.counters
)
244 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
245 " match:"HOST_WIDEST_INT_PRINT_DEC
246 " wrong:"HOST_WIDEST_INT_PRINT_DEC
,
247 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
248 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
249 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
251 fprintf (dump_file
, ".\n");
254 case HIST_TYPE_AVERAGE
:
255 fprintf (dump_file
, "Average value ");
256 if (hist
->hvalue
.counters
)
258 fprintf (dump_file
, "sum:"HOST_WIDEST_INT_PRINT_DEC
259 " times:"HOST_WIDEST_INT_PRINT_DEC
,
260 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
261 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1]);
263 fprintf (dump_file
, ".\n");
267 fprintf (dump_file
, "IOR value ");
268 if (hist
->hvalue
.counters
)
270 fprintf (dump_file
, "ior:"HOST_WIDEST_INT_PRINT_DEC
,
271 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0]);
273 fprintf (dump_file
, ".\n");
276 case HIST_TYPE_CONST_DELTA
:
277 fprintf (dump_file
, "Constant delta ");
278 if (hist
->hvalue
.counters
)
280 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
281 " match:"HOST_WIDEST_INT_PRINT_DEC
282 " wrong:"HOST_WIDEST_INT_PRINT_DEC
,
283 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
284 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
285 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
287 fprintf (dump_file
, ".\n");
289 case HIST_TYPE_INDIR_CALL
:
290 fprintf (dump_file
, "Indirect call ");
291 if (hist
->hvalue
.counters
)
293 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
294 " match:"HOST_WIDEST_INT_PRINT_DEC
295 " all:"HOST_WIDEST_INT_PRINT_DEC
,
296 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
297 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
298 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
300 fprintf (dump_file
, ".\n");
305 /* Dump all histograms attached to STMT to DUMP_FILE. */
308 dump_histograms_for_stmt (struct function
*fun
, FILE *dump_file
, gimple stmt
)
310 histogram_value hist
;
311 for (hist
= gimple_histogram_value (fun
, stmt
); hist
; hist
= hist
->hvalue
.next
)
312 dump_histogram_value (dump_file
, hist
);
315 /* Remove all histograms associated with STMT. */
318 gimple_remove_stmt_histograms (struct function
*fun
, gimple stmt
)
321 while ((val
= gimple_histogram_value (fun
, stmt
)) != NULL
)
322 gimple_remove_histogram_value (fun
, stmt
, val
);
325 /* Duplicate all histograms associates with OSTMT to STMT. */
328 gimple_duplicate_stmt_histograms (struct function
*fun
, gimple stmt
,
329 struct function
*ofun
, gimple ostmt
)
332 for (val
= gimple_histogram_value (ofun
, ostmt
); val
!= NULL
; val
= val
->hvalue
.next
)
334 histogram_value new_val
= gimple_alloc_histogram_value (fun
, val
->type
, NULL
, NULL
);
335 memcpy (new_val
, val
, sizeof (*val
));
336 new_val
->hvalue
.stmt
= stmt
;
337 new_val
->hvalue
.counters
= XNEWVAR (gcov_type
, sizeof (*new_val
->hvalue
.counters
) * new_val
->n_counters
);
338 memcpy (new_val
->hvalue
.counters
, val
->hvalue
.counters
, sizeof (*new_val
->hvalue
.counters
) * new_val
->n_counters
);
339 gimple_add_histogram_value (fun
, stmt
, new_val
);
344 /* Move all histograms associated with OSTMT to STMT. */
347 gimple_move_stmt_histograms (struct function
*fun
, gimple stmt
, gimple ostmt
)
349 histogram_value val
= gimple_histogram_value (fun
, ostmt
);
352 /* The following three statements can't be reordered,
353 because histogram hashtab relies on stmt field value
354 for finding the exact slot. */
355 set_histogram_value (fun
, ostmt
, NULL
);
356 for (; val
!= NULL
; val
= val
->hvalue
.next
)
357 val
->hvalue
.stmt
= stmt
;
358 set_histogram_value (fun
, stmt
, val
);
/* Flag set by the histogram verification helpers below when an
   inconsistency is detected.  */
static bool error_found = false;
364 /* Helper function for verify_histograms. For each histogram reachable via htab
365 walk verify that it was reached via statement walk. */
368 visit_hist (void **slot
, void *data
)
370 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
371 histogram_value hist
= *(histogram_value
*) slot
;
372 if (!pointer_set_contains (visited
, hist
))
374 error ("dead histogram");
375 dump_histogram_value (stderr
, hist
);
376 debug_gimple_stmt (hist
->hvalue
.stmt
);
383 /* Verify sanity of the histograms. */
386 verify_histograms (void)
389 gimple_stmt_iterator gsi
;
390 histogram_value hist
;
391 struct pointer_set_t
*visited_hists
;
394 visited_hists
= pointer_set_create ();
396 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
398 gimple stmt
= gsi_stmt (gsi
);
400 for (hist
= gimple_histogram_value (cfun
, stmt
); hist
;
401 hist
= hist
->hvalue
.next
)
403 if (hist
->hvalue
.stmt
!= stmt
)
405 error ("Histogram value statement does not correspond to "
406 "the statement it is associated with");
407 debug_gimple_stmt (stmt
);
408 dump_histogram_value (stderr
, hist
);
411 pointer_set_insert (visited_hists
, hist
);
414 if (VALUE_HISTOGRAMS (cfun
))
415 htab_traverse (VALUE_HISTOGRAMS (cfun
), visit_hist
, visited_hists
);
416 pointer_set_destroy (visited_hists
);
418 internal_error ("verify_histograms failed");
421 /* Helper function for verify_histograms. For each histogram reachable via htab
422 walk verify that it was reached via statement walk. */
425 free_hist (void **slot
, void *data ATTRIBUTE_UNUSED
)
427 histogram_value hist
= *(histogram_value
*) slot
;
428 free (hist
->hvalue
.counters
);
429 #ifdef ENABLE_CHECKING
430 memset (hist
, 0xab, sizeof (*hist
));
437 free_histograms (void)
439 if (VALUE_HISTOGRAMS (cfun
))
441 htab_traverse (VALUE_HISTOGRAMS (cfun
), free_hist
, NULL
);
442 htab_delete (VALUE_HISTOGRAMS (cfun
));
443 VALUE_HISTOGRAMS (cfun
) = NULL
;
448 /* The overall number of invocations of the counter should match
449 execution count of basic block. Report it as error rather than
450 internal error as it might mean that user has misused the profile
454 check_counter (gimple stmt
, const char * name
,
455 gcov_type
*count
, gcov_type
*all
, gcov_type bb_count
)
457 if (*all
!= bb_count
|| *count
> *all
)
460 locus
= (stmt
!= NULL
)
461 ? gimple_location (stmt
)
462 : DECL_SOURCE_LOCATION (current_function_decl
);
463 if (flag_profile_correction
)
465 inform (locus
, "correcting inconsistent value profile: "
466 "%s profiler overall count (%d) does not match BB count "
467 "(%d)", name
, (int)*all
, (int)bb_count
);
475 error_at (locus
, "corrupted value profile: %s "
476 "profile counter (%d out of %d) inconsistent with "
477 "basic-block count (%d)",
490 /* GIMPLE based transformations. */
493 gimple_value_profile_transformations (void)
496 gimple_stmt_iterator gsi
;
497 bool changed
= false;
501 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
503 gimple stmt
= gsi_stmt (gsi
);
504 histogram_value th
= gimple_histogram_value (cfun
, stmt
);
510 fprintf (dump_file
, "Trying transformations on stmt ");
511 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
512 dump_histograms_for_stmt (cfun
, dump_file
, stmt
);
515 /* Transformations: */
516 /* The order of things in this conditional controls which
517 transformation is used when more than one is applicable. */
518 /* It is expected that any code added by the transformations
519 will be added before the current statement, and that the
520 current statement remain valid (although possibly
521 modified) upon return. */
522 if (flag_value_profile_transformations
523 && (gimple_mod_subtract_transform (&gsi
)
524 || gimple_divmod_fixed_value_transform (&gsi
)
525 || gimple_mod_pow2_value_transform (&gsi
)
526 || gimple_stringops_transform (&gsi
)
527 || gimple_ic_transform (stmt
)))
529 stmt
= gsi_stmt (gsi
);
531 /* Original statement may no longer be in the same block. */
532 if (bb
!= gimple_bb (stmt
))
534 bb
= gimple_bb (stmt
);
535 gsi
= gsi_for_stmt (stmt
);
550 /* Generate code for transformation 1 (with parent gimple assignment
551 STMT and probability of taking the optimal path PROB, which is
552 equivalent to COUNT/ALL within roundoff error). This generates the
553 result into a temp and returns the temp; it does not replace or
554 alter the original STMT. */
557 gimple_divmod_fixed_value (gimple stmt
, tree value
, int prob
, gcov_type count
,
560 gimple stmt1
, stmt2
, stmt3
;
561 tree tmp0
, tmp1
, tmp2
, tmpv
;
562 gimple bb1end
, bb2end
, bb3end
;
563 basic_block bb
, bb2
, bb3
, bb4
;
564 tree optype
, op1
, op2
;
565 edge e12
, e13
, e23
, e24
, e34
;
566 gimple_stmt_iterator gsi
;
568 gcc_assert (is_gimple_assign (stmt
)
569 && (gimple_assign_rhs_code (stmt
) == TRUNC_DIV_EXPR
570 || gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
));
572 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
573 op1
= gimple_assign_rhs1 (stmt
);
574 op2
= gimple_assign_rhs2 (stmt
);
576 bb
= gimple_bb (stmt
);
577 gsi
= gsi_for_stmt (stmt
);
579 tmpv
= create_tmp_reg (optype
, "PROF");
580 tmp0
= make_ssa_name (tmpv
, NULL
);
581 tmp1
= make_ssa_name (tmpv
, NULL
);
582 stmt1
= gimple_build_assign (tmp0
, fold_convert (optype
, value
));
583 SSA_NAME_DEF_STMT (tmp0
) = stmt1
;
584 stmt2
= gimple_build_assign (tmp1
, op2
);
585 SSA_NAME_DEF_STMT (tmp1
) = stmt2
;
586 stmt3
= gimple_build_cond (NE_EXPR
, tmp1
, tmp0
, NULL_TREE
, NULL_TREE
);
587 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
588 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
589 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
592 tmp2
= make_rename_temp (optype
, "PROF");
593 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), tmp2
,
595 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
598 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), tmp2
,
600 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
604 /* Edge e23 connects bb2 to bb3, etc. */
605 e12
= split_block (bb
, bb1end
);
608 e23
= split_block (bb2
, bb2end
);
610 bb3
->count
= all
- count
;
611 e34
= split_block (bb3
, bb3end
);
615 e12
->flags
&= ~EDGE_FALLTHRU
;
616 e12
->flags
|= EDGE_FALSE_VALUE
;
617 e12
->probability
= prob
;
620 e13
= make_edge (bb
, bb3
, EDGE_TRUE_VALUE
);
621 e13
->probability
= REG_BR_PROB_BASE
- prob
;
622 e13
->count
= all
- count
;
626 e24
= make_edge (bb2
, bb4
, EDGE_FALLTHRU
);
627 e24
->probability
= REG_BR_PROB_BASE
;
630 e34
->probability
= REG_BR_PROB_BASE
;
631 e34
->count
= all
- count
;
637 /* Do transform 1) on INSN if applicable. */
640 gimple_divmod_fixed_value_transform (gimple_stmt_iterator
*si
)
642 histogram_value histogram
;
644 gcov_type val
, count
, all
;
645 tree result
, value
, tree_val
;
649 stmt
= gsi_stmt (*si
);
650 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
653 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt
))))
656 code
= gimple_assign_rhs_code (stmt
);
658 if (code
!= TRUNC_DIV_EXPR
&& code
!= TRUNC_MOD_EXPR
)
661 histogram
= gimple_histogram_value_of_type (cfun
, stmt
,
662 HIST_TYPE_SINGLE_VALUE
);
666 value
= histogram
->hvalue
.value
;
667 val
= histogram
->hvalue
.counters
[0];
668 count
= histogram
->hvalue
.counters
[1];
669 all
= histogram
->hvalue
.counters
[2];
670 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
672 /* We require that count is at least half of all; this means
673 that for the transformation to fire the value must be constant
674 at least 50% of time (and 75% gives the guarantee of usage). */
675 if (simple_cst_equal (gimple_assign_rhs2 (stmt
), value
) != 1
677 || optimize_bb_for_size_p (gimple_bb (stmt
)))
680 if (check_counter (stmt
, "value", &count
, &all
, gimple_bb (stmt
)->count
))
683 /* Compute probability of taking the optimal path. */
685 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
689 tree_val
= build_int_cst_wide (get_gcov_type (),
690 (unsigned HOST_WIDE_INT
) val
,
691 val
>> (HOST_BITS_PER_WIDE_INT
- 1) >> 1);
692 result
= gimple_divmod_fixed_value (stmt
, tree_val
, prob
, count
, all
);
696 fprintf (dump_file
, "Div/mod by constant ");
697 print_generic_expr (dump_file
, value
, TDF_SLIM
);
698 fprintf (dump_file
, "=");
699 print_generic_expr (dump_file
, tree_val
, TDF_SLIM
);
700 fprintf (dump_file
, " transformation on insn ");
701 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
704 gimple_assign_set_rhs_from_tree (si
, result
);
705 update_stmt (gsi_stmt (*si
));
710 /* Generate code for transformation 2 (with parent gimple assign STMT and
711 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
712 within roundoff error). This generates the result into a temp and returns
713 the temp; it does not replace or alter the original STMT. */
715 gimple_mod_pow2 (gimple stmt
, int prob
, gcov_type count
, gcov_type all
)
717 gimple stmt1
, stmt2
, stmt3
, stmt4
;
718 tree tmp2
, tmp3
, tmpv
;
719 gimple bb1end
, bb2end
, bb3end
;
720 basic_block bb
, bb2
, bb3
, bb4
;
721 tree optype
, op1
, op2
;
722 edge e12
, e13
, e23
, e24
, e34
;
723 gimple_stmt_iterator gsi
;
726 gcc_assert (is_gimple_assign (stmt
)
727 && gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
);
729 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
730 op1
= gimple_assign_rhs1 (stmt
);
731 op2
= gimple_assign_rhs2 (stmt
);
733 bb
= gimple_bb (stmt
);
734 gsi
= gsi_for_stmt (stmt
);
736 result
= make_rename_temp (optype
, "PROF");
737 tmpv
= create_tmp_var (optype
, "PROF");
738 tmp2
= make_ssa_name (tmpv
, NULL
);
739 tmp3
= make_ssa_name (tmpv
, NULL
);
740 stmt2
= gimple_build_assign_with_ops (PLUS_EXPR
, tmp2
, op2
,
741 build_int_cst (optype
, -1));
742 SSA_NAME_DEF_STMT (tmp2
) = stmt2
;
743 stmt3
= gimple_build_assign_with_ops (BIT_AND_EXPR
, tmp3
, tmp2
, op2
);
744 SSA_NAME_DEF_STMT (tmp3
) = stmt3
;
745 stmt4
= gimple_build_cond (NE_EXPR
, tmp3
, build_int_cst (optype
, 0),
746 NULL_TREE
, NULL_TREE
);
747 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
748 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
749 gsi_insert_before (&gsi
, stmt4
, GSI_SAME_STMT
);
752 /* tmp2 == op2-1 inherited from previous block. */
753 stmt1
= gimple_build_assign_with_ops (BIT_AND_EXPR
, result
, op1
, tmp2
);
754 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
757 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), result
,
759 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
763 /* Edge e23 connects bb2 to bb3, etc. */
764 e12
= split_block (bb
, bb1end
);
767 e23
= split_block (bb2
, bb2end
);
769 bb3
->count
= all
- count
;
770 e34
= split_block (bb3
, bb3end
);
774 e12
->flags
&= ~EDGE_FALLTHRU
;
775 e12
->flags
|= EDGE_FALSE_VALUE
;
776 e12
->probability
= prob
;
779 e13
= make_edge (bb
, bb3
, EDGE_TRUE_VALUE
);
780 e13
->probability
= REG_BR_PROB_BASE
- prob
;
781 e13
->count
= all
- count
;
785 e24
= make_edge (bb2
, bb4
, EDGE_FALLTHRU
);
786 e24
->probability
= REG_BR_PROB_BASE
;
789 e34
->probability
= REG_BR_PROB_BASE
;
790 e34
->count
= all
- count
;
795 /* Do transform 2) on INSN if applicable. */
797 gimple_mod_pow2_value_transform (gimple_stmt_iterator
*si
)
799 histogram_value histogram
;
801 gcov_type count
, wrong_values
, all
;
802 tree lhs_type
, result
, value
;
806 stmt
= gsi_stmt (*si
);
807 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
810 lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
811 if (!INTEGRAL_TYPE_P (lhs_type
))
814 code
= gimple_assign_rhs_code (stmt
);
816 if (code
!= TRUNC_MOD_EXPR
|| !TYPE_UNSIGNED (lhs_type
))
819 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_POW2
);
823 value
= histogram
->hvalue
.value
;
824 wrong_values
= histogram
->hvalue
.counters
[0];
825 count
= histogram
->hvalue
.counters
[1];
827 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
829 /* We require that we hit a power of 2 at least half of all evaluations. */
830 if (simple_cst_equal (gimple_assign_rhs2 (stmt
), value
) != 1
831 || count
< wrong_values
832 || optimize_bb_for_size_p (gimple_bb (stmt
)))
837 fprintf (dump_file
, "Mod power of 2 transformation on insn ");
838 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
841 /* Compute probability of taking the optimal path. */
842 all
= count
+ wrong_values
;
844 if (check_counter (stmt
, "pow2", &count
, &all
, gimple_bb (stmt
)->count
))
848 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
852 result
= gimple_mod_pow2 (stmt
, prob
, count
, all
);
854 gimple_assign_set_rhs_from_tree (si
, result
);
855 update_stmt (gsi_stmt (*si
));
860 /* Generate code for transformations 3 and 4 (with parent gimple assign STMT, and
861 NCOUNTS the number of cases to support. Currently only NCOUNTS==0 or 1 is
862 supported and this is built into this interface. The probabilities of taking
863 the optimal paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
864 COUNT2/ALL respectively within roundoff error). This generates the
865 result into a temp and returns the temp; it does not replace or alter
866 the original STMT. */
867 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
870 gimple_mod_subtract (gimple stmt
, int prob1
, int prob2
, int ncounts
,
871 gcov_type count1
, gcov_type count2
, gcov_type all
)
873 gimple stmt1
, stmt2
, stmt3
;
875 gimple bb1end
, bb2end
= NULL
, bb3end
;
876 basic_block bb
, bb2
, bb3
, bb4
;
877 tree optype
, op1
, op2
;
878 edge e12
, e23
= 0, e24
, e34
, e14
;
879 gimple_stmt_iterator gsi
;
882 gcc_assert (is_gimple_assign (stmt
)
883 && gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
);
885 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
886 op1
= gimple_assign_rhs1 (stmt
);
887 op2
= gimple_assign_rhs2 (stmt
);
889 bb
= gimple_bb (stmt
);
890 gsi
= gsi_for_stmt (stmt
);
892 result
= make_rename_temp (optype
, "PROF");
893 tmp1
= make_ssa_name (create_tmp_var (optype
, "PROF"), NULL
);
894 stmt1
= gimple_build_assign (result
, op1
);
895 stmt2
= gimple_build_assign (tmp1
, op2
);
896 SSA_NAME_DEF_STMT (tmp1
) = stmt2
;
897 stmt3
= gimple_build_cond (LT_EXPR
, result
, tmp1
, NULL_TREE
, NULL_TREE
);
898 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
899 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
900 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
903 if (ncounts
) /* Assumed to be 0 or 1 */
905 stmt1
= gimple_build_assign_with_ops (MINUS_EXPR
, result
, result
, tmp1
);
906 stmt2
= gimple_build_cond (LT_EXPR
, result
, tmp1
, NULL_TREE
, NULL_TREE
);
907 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
908 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
913 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), result
,
915 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
919 /* Edge e23 connects bb2 to bb3, etc. */
920 /* However block 3 is optional; if it is not there, references
921 to 3 really refer to block 2. */
922 e12
= split_block (bb
, bb1end
);
924 bb2
->count
= all
- count1
;
926 if (ncounts
) /* Assumed to be 0 or 1. */
928 e23
= split_block (bb2
, bb2end
);
930 bb3
->count
= all
- count1
- count2
;
933 e34
= split_block (ncounts
? bb3
: bb2
, bb3end
);
937 e12
->flags
&= ~EDGE_FALLTHRU
;
938 e12
->flags
|= EDGE_FALSE_VALUE
;
939 e12
->probability
= REG_BR_PROB_BASE
- prob1
;
940 e12
->count
= all
- count1
;
942 e14
= make_edge (bb
, bb4
, EDGE_TRUE_VALUE
);
943 e14
->probability
= prob1
;
946 if (ncounts
) /* Assumed to be 0 or 1. */
948 e23
->flags
&= ~EDGE_FALLTHRU
;
949 e23
->flags
|= EDGE_FALSE_VALUE
;
950 e23
->count
= all
- count1
- count2
;
951 e23
->probability
= REG_BR_PROB_BASE
- prob2
;
953 e24
= make_edge (bb2
, bb4
, EDGE_TRUE_VALUE
);
954 e24
->probability
= prob2
;
958 e34
->probability
= REG_BR_PROB_BASE
;
959 e34
->count
= all
- count1
- count2
;
965 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
968 gimple_mod_subtract_transform (gimple_stmt_iterator
*si
)
970 histogram_value histogram
;
972 gcov_type count
, wrong_values
, all
;
973 tree lhs_type
, result
;
974 gcov_type prob1
, prob2
;
975 unsigned int i
, steps
;
976 gcov_type count1
, count2
;
979 stmt
= gsi_stmt (*si
);
980 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
983 lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
984 if (!INTEGRAL_TYPE_P (lhs_type
))
987 code
= gimple_assign_rhs_code (stmt
);
989 if (code
!= TRUNC_MOD_EXPR
|| !TYPE_UNSIGNED (lhs_type
))
992 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_INTERVAL
);
998 for (i
= 0; i
< histogram
->hdata
.intvl
.steps
; i
++)
999 all
+= histogram
->hvalue
.counters
[i
];
1001 wrong_values
+= histogram
->hvalue
.counters
[i
];
1002 wrong_values
+= histogram
->hvalue
.counters
[i
+1];
1003 steps
= histogram
->hdata
.intvl
.steps
;
1004 all
+= wrong_values
;
1005 count1
= histogram
->hvalue
.counters
[0];
1006 count2
= histogram
->hvalue
.counters
[1];
1008 /* Compute probability of taking the optimal path. */
1009 if (check_counter (stmt
, "interval", &count1
, &all
, gimple_bb (stmt
)->count
))
1011 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1015 if (flag_profile_correction
&& count1
+ count2
> all
)
1016 all
= count1
+ count2
;
1018 gcc_assert (count1
+ count2
<= all
);
1020 /* We require that we use just subtractions in at least 50% of all
1023 for (i
= 0; i
< histogram
->hdata
.intvl
.steps
; i
++)
1025 count
+= histogram
->hvalue
.counters
[i
];
1026 if (count
* 2 >= all
)
1030 || optimize_bb_for_size_p (gimple_bb (stmt
)))
1033 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1036 fprintf (dump_file
, "Mod subtract transformation on insn ");
1037 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1040 /* Compute probability of taking the optimal path(s). */
1043 prob1
= (count1
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1044 prob2
= (count2
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1051 /* In practice, "steps" is always 2. This interface reflects this,
1052 and will need to be changed if "steps" can change. */
1053 result
= gimple_mod_subtract (stmt
, prob1
, prob2
, i
, count1
, count2
, all
);
1055 gimple_assign_set_rhs_from_tree (si
, result
);
1056 update_stmt (gsi_stmt (*si
));
1061 static VEC(cgraph_node_ptr
, heap
) *cgraph_node_map
= NULL
;
1063 /* Initialize map from FUNCDEF_NO to CGRAPH_NODE. */
1066 init_node_map (void)
1068 struct cgraph_node
*n
;
1070 if (get_last_funcdef_no ())
1071 VEC_safe_grow_cleared (cgraph_node_ptr
, heap
,
1072 cgraph_node_map
, get_last_funcdef_no ());
1074 FOR_EACH_FUNCTION (n
)
1076 if (DECL_STRUCT_FUNCTION (n
->symbol
.decl
))
1077 VEC_replace (cgraph_node_ptr
, cgraph_node_map
,
1078 DECL_STRUCT_FUNCTION (n
->symbol
.decl
)->funcdef_no
, n
);
1082 /* Delete the CGRAPH_NODE_MAP. */
1087 VEC_free (cgraph_node_ptr
, heap
, cgraph_node_map
);
1088 cgraph_node_map
= NULL
;
1091 /* Return cgraph node for function with pid */
1093 static inline struct cgraph_node
*
1094 find_func_by_funcdef_no (int func_id
)
1096 int max_id
= get_last_funcdef_no ();
1097 if (func_id
>= max_id
|| VEC_index (cgraph_node_ptr
,
1101 if (flag_profile_correction
)
1102 inform (DECL_SOURCE_LOCATION (current_function_decl
),
1103 "Inconsistent profile: indirect call target (%d) does not exist", func_id
);
1105 error ("Inconsistent profile: indirect call target (%d) does not exist", func_id
);
1110 return VEC_index (cgraph_node_ptr
, cgraph_node_map
, func_id
);
1113 /* Perform sanity check on the indirect call target. Due to race conditions,
1114 false function target may be attributed to an indirect call site. If the
1115 call expression type mismatches with the target function's type, expand_call
1116 may ICE. Here we only do very minimal sanity check just to make compiler happy.
1117 Returns true if TARGET is considered ok for call CALL_STMT. */
1120 check_ic_target (gimple call_stmt
, struct cgraph_node
*target
)
1123 if (gimple_check_call_matching_types (call_stmt
, target
->symbol
.decl
))
1126 locus
= gimple_location (call_stmt
);
1127 inform (locus
, "Skipping target %s with mismatching types for icall ",
1128 cgraph_node_name (target
));
1132 /* Do transformation
1134 if (actual_callee_address == address_of_most_common_function/method)
1141 gimple_ic (gimple icall_stmt
, struct cgraph_node
*direct_call
,
1142 int prob
, gcov_type count
, gcov_type all
)
1144 gimple dcall_stmt
, load_stmt
, cond_stmt
;
1145 tree tmp0
, tmp1
, tmpv
, tmp
;
1146 basic_block cond_bb
, dcall_bb
, icall_bb
, join_bb
= NULL
;
1147 tree optype
= build_pointer_type (void_type_node
);
1148 edge e_cd
, e_ci
, e_di
, e_dj
= NULL
, e_ij
;
1149 gimple_stmt_iterator gsi
;
1152 cond_bb
= gimple_bb (icall_stmt
);
1153 gsi
= gsi_for_stmt (icall_stmt
);
1155 tmpv
= create_tmp_reg (optype
, "PROF");
1156 tmp0
= make_ssa_name (tmpv
, NULL
);
1157 tmp1
= make_ssa_name (tmpv
, NULL
);
1158 tmp
= unshare_expr (gimple_call_fn (icall_stmt
));
1159 load_stmt
= gimple_build_assign (tmp0
, tmp
);
1160 SSA_NAME_DEF_STMT (tmp0
) = load_stmt
;
1161 gsi_insert_before (&gsi
, load_stmt
, GSI_SAME_STMT
);
1163 tmp
= fold_convert (optype
, build_addr (direct_call
->symbol
.decl
,
1164 current_function_decl
));
1165 load_stmt
= gimple_build_assign (tmp1
, tmp
);
1166 SSA_NAME_DEF_STMT (tmp1
) = load_stmt
;
1167 gsi_insert_before (&gsi
, load_stmt
, GSI_SAME_STMT
);
1169 cond_stmt
= gimple_build_cond (EQ_EXPR
, tmp1
, tmp0
, NULL_TREE
, NULL_TREE
);
1170 gsi_insert_before (&gsi
, cond_stmt
, GSI_SAME_STMT
);
1172 gimple_set_vdef (icall_stmt
, NULL_TREE
);
1173 gimple_set_vuse (icall_stmt
, NULL_TREE
);
1174 update_stmt (icall_stmt
);
1175 dcall_stmt
= gimple_copy (icall_stmt
);
1176 gimple_call_set_fndecl (dcall_stmt
, direct_call
->symbol
.decl
);
1177 dflags
= flags_from_decl_or_type (direct_call
->symbol
.decl
);
1178 if ((dflags
& ECF_NORETURN
) != 0)
1179 gimple_call_set_lhs (dcall_stmt
, NULL_TREE
);
1180 gsi_insert_before (&gsi
, dcall_stmt
, GSI_SAME_STMT
);
1183 /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1184 e_cd
= split_block (cond_bb
, cond_stmt
);
1185 dcall_bb
= e_cd
->dest
;
1186 dcall_bb
->count
= count
;
1188 e_di
= split_block (dcall_bb
, dcall_stmt
);
1189 icall_bb
= e_di
->dest
;
1190 icall_bb
->count
= all
- count
;
1192 /* Do not disturb existing EH edges from the indirect call. */
1193 if (!stmt_ends_bb_p (icall_stmt
))
1194 e_ij
= split_block (icall_bb
, icall_stmt
);
1197 e_ij
= find_fallthru_edge (icall_bb
->succs
);
1198 /* The indirect call might be noreturn. */
1201 e_ij
->probability
= REG_BR_PROB_BASE
;
1202 e_ij
->count
= all
- count
;
1203 e_ij
= single_pred_edge (split_edge (e_ij
));
1208 join_bb
= e_ij
->dest
;
1209 join_bb
->count
= all
;
1212 e_cd
->flags
= (e_cd
->flags
& ~EDGE_FALLTHRU
) | EDGE_TRUE_VALUE
;
1213 e_cd
->probability
= prob
;
1214 e_cd
->count
= count
;
1216 e_ci
= make_edge (cond_bb
, icall_bb
, EDGE_FALSE_VALUE
);
1217 e_ci
->probability
= REG_BR_PROB_BASE
- prob
;
1218 e_ci
->count
= all
- count
;
1224 if ((dflags
& ECF_NORETURN
) != 0)
1228 e_dj
= make_edge (dcall_bb
, join_bb
, EDGE_FALLTHRU
);
1229 e_dj
->probability
= REG_BR_PROB_BASE
;
1230 e_dj
->count
= count
;
1232 e_ij
->count
= all
- count
;
1234 e_ij
->probability
= REG_BR_PROB_BASE
;
1237 /* Insert PHI node for the call result if necessary. */
1238 if (gimple_call_lhs (icall_stmt
)
1239 && TREE_CODE (gimple_call_lhs (icall_stmt
)) == SSA_NAME
1240 && (dflags
& ECF_NORETURN
) == 0)
1242 tree result
= gimple_call_lhs (icall_stmt
);
1243 gimple phi
= create_phi_node (result
, join_bb
);
1244 SSA_NAME_DEF_STMT (result
) = phi
;
1245 gimple_call_set_lhs (icall_stmt
,
1246 make_ssa_name (SSA_NAME_VAR (result
), icall_stmt
));
1247 add_phi_arg (phi
, gimple_call_lhs (icall_stmt
), e_ij
, UNKNOWN_LOCATION
);
1248 gimple_call_set_lhs (dcall_stmt
,
1249 make_ssa_name (SSA_NAME_VAR (result
), dcall_stmt
));
1250 add_phi_arg (phi
, gimple_call_lhs (dcall_stmt
), e_dj
, UNKNOWN_LOCATION
);
1253 /* Build an EH edge for the direct call if necessary. */
1254 lp_nr
= lookup_stmt_eh_lp (icall_stmt
);
1256 && stmt_could_throw_p (dcall_stmt
))
1260 gimple_stmt_iterator psi
;
1262 add_stmt_to_eh_lp (dcall_stmt
, lp_nr
);
1263 FOR_EACH_EDGE (e_eh
, ei
, icall_bb
->succs
)
1264 if (e_eh
->flags
& EDGE_EH
)
1266 e
= make_edge (dcall_bb
, e_eh
->dest
, EDGE_EH
);
1267 for (psi
= gsi_start_phis (e_eh
->dest
);
1268 !gsi_end_p (psi
); gsi_next (&psi
))
1270 gimple phi
= gsi_stmt (psi
);
1271 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
1272 PHI_ARG_DEF_FROM_EDGE (phi
, e_eh
));
1280 For every checked indirect/virtual call determine if most common pid of
1281 function/class method has probability more than 50%. If yes modify code of
1286 gimple_ic_transform (gimple stmt
)
1288 histogram_value histogram
;
1289 gcov_type val
, count
, all
, bb_all
;
1292 struct cgraph_node
*direct_call
;
1294 if (gimple_code (stmt
) != GIMPLE_CALL
)
1297 if (gimple_call_fndecl (stmt
) != NULL_TREE
)
1300 if (gimple_call_internal_p (stmt
))
1303 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_INDIR_CALL
);
1307 val
= histogram
->hvalue
.counters
[0];
1308 count
= histogram
->hvalue
.counters
[1];
1309 all
= histogram
->hvalue
.counters
[2];
1310 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1312 if (4 * count
<= 3 * all
)
1315 bb_all
= gimple_bb (stmt
)->count
;
1316 /* The order of CHECK_COUNTER calls is important -
1317 since check_counter can correct the third parameter
1318 and we want to make count <= all <= bb_all. */
1319 if ( check_counter (stmt
, "ic", &all
, &bb_all
, bb_all
)
1320 || check_counter (stmt
, "ic", &count
, &all
, all
))
1324 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1327 direct_call
= find_func_by_funcdef_no ((int)val
);
1329 if (direct_call
== NULL
)
1332 if (!check_ic_target (stmt
, direct_call
))
1335 modify
= gimple_ic (stmt
, direct_call
, prob
, count
, all
);
1339 fprintf (dump_file
, "Indirect call -> direct call ");
1340 print_generic_expr (dump_file
, gimple_call_fn (stmt
), TDF_SLIM
);
1341 fprintf (dump_file
, "=> ");
1342 print_generic_expr (dump_file
, direct_call
->symbol
.decl
, TDF_SLIM
);
1343 fprintf (dump_file
, " transformation on insn ");
1344 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1345 fprintf (dump_file
, " to ");
1346 print_gimple_stmt (dump_file
, modify
, 0, TDF_SLIM
);
1347 fprintf (dump_file
, "hist->count "HOST_WIDEST_INT_PRINT_DEC
1348 " hist->all "HOST_WIDEST_INT_PRINT_DEC
"\n", count
, all
);
1354 /* Return true if the stringop CALL with FNDECL shall be profiled.
1355 SIZE_ARG be set to the argument index for the size of the string
1359 interesting_stringop_to_profile_p (tree fndecl
, gimple call
, int *size_arg
)
1361 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
1363 if (fcode
!= BUILT_IN_MEMCPY
&& fcode
!= BUILT_IN_MEMPCPY
1364 && fcode
!= BUILT_IN_MEMSET
&& fcode
!= BUILT_IN_BZERO
)
1369 case BUILT_IN_MEMCPY
:
1370 case BUILT_IN_MEMPCPY
:
1372 return validate_gimple_arglist (call
, POINTER_TYPE
, POINTER_TYPE
,
1373 INTEGER_TYPE
, VOID_TYPE
);
1374 case BUILT_IN_MEMSET
:
1376 return validate_gimple_arglist (call
, POINTER_TYPE
, INTEGER_TYPE
,
1377 INTEGER_TYPE
, VOID_TYPE
);
1378 case BUILT_IN_BZERO
:
1380 return validate_gimple_arglist (call
, POINTER_TYPE
, INTEGER_TYPE
,
1387 /* Convert stringop (..., vcall_size)
1389 if (vcall_size == icall_size)
1390 stringop (..., icall_size);
1392 stringop (..., vcall_size);
1393 assuming we'll propagate a true constant into ICALL_SIZE later. */
1396 gimple_stringop_fixed_value (gimple vcall_stmt
, tree icall_size
, int prob
,
1397 gcov_type count
, gcov_type all
)
1399 gimple tmp_stmt
, cond_stmt
, icall_stmt
;
1400 tree tmp0
, tmp1
, tmpv
, vcall_size
, optype
;
1401 basic_block cond_bb
, icall_bb
, vcall_bb
, join_bb
;
1402 edge e_ci
, e_cv
, e_iv
, e_ij
, e_vj
;
1403 gimple_stmt_iterator gsi
;
1407 fndecl
= gimple_call_fndecl (vcall_stmt
);
1408 if (!interesting_stringop_to_profile_p (fndecl
, vcall_stmt
, &size_arg
))
1411 cond_bb
= gimple_bb (vcall_stmt
);
1412 gsi
= gsi_for_stmt (vcall_stmt
);
1414 vcall_size
= gimple_call_arg (vcall_stmt
, size_arg
);
1415 optype
= TREE_TYPE (vcall_size
);
1417 tmpv
= create_tmp_var (optype
, "PROF");
1418 tmp0
= make_ssa_name (tmpv
, NULL
);
1419 tmp1
= make_ssa_name (tmpv
, NULL
);
1420 tmp_stmt
= gimple_build_assign (tmp0
, fold_convert (optype
, icall_size
));
1421 SSA_NAME_DEF_STMT (tmp0
) = tmp_stmt
;
1422 gsi_insert_before (&gsi
, tmp_stmt
, GSI_SAME_STMT
);
1424 tmp_stmt
= gimple_build_assign (tmp1
, vcall_size
);
1425 SSA_NAME_DEF_STMT (tmp1
) = tmp_stmt
;
1426 gsi_insert_before (&gsi
, tmp_stmt
, GSI_SAME_STMT
);
1428 cond_stmt
= gimple_build_cond (EQ_EXPR
, tmp1
, tmp0
, NULL_TREE
, NULL_TREE
);
1429 gsi_insert_before (&gsi
, cond_stmt
, GSI_SAME_STMT
);
1431 gimple_set_vdef (vcall_stmt
, NULL
);
1432 gimple_set_vuse (vcall_stmt
, NULL
);
1433 update_stmt (vcall_stmt
);
1434 icall_stmt
= gimple_copy (vcall_stmt
);
1435 gimple_call_set_arg (icall_stmt
, size_arg
, icall_size
);
1436 gsi_insert_before (&gsi
, icall_stmt
, GSI_SAME_STMT
);
1439 /* Edge e_ci connects cond_bb to icall_bb, etc. */
1440 e_ci
= split_block (cond_bb
, cond_stmt
);
1441 icall_bb
= e_ci
->dest
;
1442 icall_bb
->count
= count
;
1444 e_iv
= split_block (icall_bb
, icall_stmt
);
1445 vcall_bb
= e_iv
->dest
;
1446 vcall_bb
->count
= all
- count
;
1448 e_vj
= split_block (vcall_bb
, vcall_stmt
);
1449 join_bb
= e_vj
->dest
;
1450 join_bb
->count
= all
;
1452 e_ci
->flags
= (e_ci
->flags
& ~EDGE_FALLTHRU
) | EDGE_TRUE_VALUE
;
1453 e_ci
->probability
= prob
;
1454 e_ci
->count
= count
;
1456 e_cv
= make_edge (cond_bb
, vcall_bb
, EDGE_FALSE_VALUE
);
1457 e_cv
->probability
= REG_BR_PROB_BASE
- prob
;
1458 e_cv
->count
= all
- count
;
1462 e_ij
= make_edge (icall_bb
, join_bb
, EDGE_FALLTHRU
);
1463 e_ij
->probability
= REG_BR_PROB_BASE
;
1464 e_ij
->count
= count
;
1466 e_vj
->probability
= REG_BR_PROB_BASE
;
1467 e_vj
->count
= all
- count
;
1469 /* Insert PHI node for the call result if necessary. */
1470 if (gimple_call_lhs (vcall_stmt
)
1471 && TREE_CODE (gimple_call_lhs (vcall_stmt
)) == SSA_NAME
)
1473 tree result
= gimple_call_lhs (vcall_stmt
);
1474 gimple phi
= create_phi_node (result
, join_bb
);
1475 SSA_NAME_DEF_STMT (result
) = phi
;
1476 gimple_call_set_lhs (vcall_stmt
,
1477 make_ssa_name (SSA_NAME_VAR (result
), vcall_stmt
));
1478 add_phi_arg (phi
, gimple_call_lhs (vcall_stmt
), e_vj
, UNKNOWN_LOCATION
);
1479 gimple_call_set_lhs (icall_stmt
,
1480 make_ssa_name (SSA_NAME_VAR (result
), icall_stmt
));
1481 add_phi_arg (phi
, gimple_call_lhs (icall_stmt
), e_ij
, UNKNOWN_LOCATION
);
1484 /* Because these are all string op builtins, they're all nothrow. */
1485 gcc_assert (!stmt_could_throw_p (vcall_stmt
));
1486 gcc_assert (!stmt_could_throw_p (icall_stmt
));
1489 /* Find values inside STMT for that we want to measure histograms for
1490 division/modulo optimization. */
1492 gimple_stringops_transform (gimple_stmt_iterator
*gsi
)
1494 gimple stmt
= gsi_stmt (*gsi
);
1497 enum built_in_function fcode
;
1498 histogram_value histogram
;
1499 gcov_type count
, all
, val
;
1501 unsigned int dest_align
, src_align
;
1506 if (gimple_code (stmt
) != GIMPLE_CALL
)
1508 fndecl
= gimple_call_fndecl (stmt
);
1511 fcode
= DECL_FUNCTION_CODE (fndecl
);
1512 if (!interesting_stringop_to_profile_p (fndecl
, stmt
, &size_arg
))
1515 blck_size
= gimple_call_arg (stmt
, size_arg
);
1516 if (TREE_CODE (blck_size
) == INTEGER_CST
)
1519 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_SINGLE_VALUE
);
1522 val
= histogram
->hvalue
.counters
[0];
1523 count
= histogram
->hvalue
.counters
[1];
1524 all
= histogram
->hvalue
.counters
[2];
1525 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1526 /* We require that count is at least half of all; this means
1527 that for the transformation to fire the value must be constant
1528 at least 80% of time. */
1529 if ((6 * count
/ 5) < all
|| optimize_bb_for_size_p (gimple_bb (stmt
)))
1531 if (check_counter (stmt
, "value", &count
, &all
, gimple_bb (stmt
)->count
))
1534 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1537 dest
= gimple_call_arg (stmt
, 0);
1538 dest_align
= get_pointer_alignment (dest
);
1541 case BUILT_IN_MEMCPY
:
1542 case BUILT_IN_MEMPCPY
:
1543 src
= gimple_call_arg (stmt
, 1);
1544 src_align
= get_pointer_alignment (src
);
1545 if (!can_move_by_pieces (val
, MIN (dest_align
, src_align
)))
1548 case BUILT_IN_MEMSET
:
1549 if (!can_store_by_pieces (val
, builtin_memset_read_str
,
1550 gimple_call_arg (stmt
, 1),
1554 case BUILT_IN_BZERO
:
1555 if (!can_store_by_pieces (val
, builtin_memset_read_str
,
1563 tree_val
= build_int_cst_wide (get_gcov_type (),
1564 (unsigned HOST_WIDE_INT
) val
,
1565 val
>> (HOST_BITS_PER_WIDE_INT
- 1) >> 1);
1568 fprintf (dump_file
, "Single value %i stringop transformation on ",
1570 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1572 gimple_stringop_fixed_value (stmt
, tree_val
, prob
, count
, all
);
1578 stringop_block_profile (gimple stmt
, unsigned int *expected_align
,
1579 HOST_WIDE_INT
*expected_size
)
1581 histogram_value histogram
;
1582 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_AVERAGE
);
1584 *expected_size
= -1;
1585 else if (!histogram
->hvalue
.counters
[1])
1587 *expected_size
= -1;
1588 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1593 size
= ((histogram
->hvalue
.counters
[0]
1594 + histogram
->hvalue
.counters
[1] / 2)
1595 / histogram
->hvalue
.counters
[1]);
1596 /* Even if we can hold bigger value in SIZE, INT_MAX
1597 is safe "infinity" for code generation strategies. */
1600 *expected_size
= size
;
1601 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1603 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_IOR
);
1605 *expected_align
= 0;
1606 else if (!histogram
->hvalue
.counters
[0])
1608 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1609 *expected_align
= 0;
1616 count
= histogram
->hvalue
.counters
[0];
1618 while (!(count
& alignment
)
1619 && (alignment
* 2 * BITS_PER_UNIT
))
1621 *expected_align
= alignment
* BITS_PER_UNIT
;
1622 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1627 /* Find values inside STMT for that we want to measure histograms for
1628 division/modulo optimization. */
1630 gimple_divmod_values_to_profile (gimple stmt
, histogram_values
*values
)
1632 tree lhs
, divisor
, op0
, type
;
1633 histogram_value hist
;
1635 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
1638 lhs
= gimple_assign_lhs (stmt
);
1639 type
= TREE_TYPE (lhs
);
1640 if (!INTEGRAL_TYPE_P (type
))
1643 switch (gimple_assign_rhs_code (stmt
))
1645 case TRUNC_DIV_EXPR
:
1646 case TRUNC_MOD_EXPR
:
1647 divisor
= gimple_assign_rhs2 (stmt
);
1648 op0
= gimple_assign_rhs1 (stmt
);
1650 VEC_reserve (histogram_value
, heap
, *values
, 3);
1652 if (is_gimple_reg (divisor
))
1653 /* Check for the case where the divisor is the same value most
1655 VEC_quick_push (histogram_value
, *values
,
1656 gimple_alloc_histogram_value (cfun
,
1657 HIST_TYPE_SINGLE_VALUE
,
1660 /* For mod, check whether it is not often a noop (or replaceable by
1661 a few subtractions). */
1662 if (gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
1663 && TYPE_UNSIGNED (type
))
1666 /* Check for a special case where the divisor is power of 2. */
1667 VEC_quick_push (histogram_value
, *values
,
1668 gimple_alloc_histogram_value (cfun
, HIST_TYPE_POW2
,
1671 val
= build2 (TRUNC_DIV_EXPR
, type
, op0
, divisor
);
1672 hist
= gimple_alloc_histogram_value (cfun
, HIST_TYPE_INTERVAL
,
1674 hist
->hdata
.intvl
.int_start
= 0;
1675 hist
->hdata
.intvl
.steps
= 2;
1676 VEC_quick_push (histogram_value
, *values
, hist
);
1685 /* Find calls inside STMT for that we want to measure histograms for
1686 indirect/virtual call optimization. */
1689 gimple_indirect_call_to_profile (gimple stmt
, histogram_values
*values
)
1693 if (gimple_code (stmt
) != GIMPLE_CALL
1694 || gimple_call_internal_p (stmt
)
1695 || gimple_call_fndecl (stmt
) != NULL_TREE
)
1698 callee
= gimple_call_fn (stmt
);
1700 VEC_reserve (histogram_value
, heap
, *values
, 3);
1702 VEC_quick_push (histogram_value
, *values
,
1703 gimple_alloc_histogram_value (cfun
, HIST_TYPE_INDIR_CALL
,
1709 /* Find values inside STMT for that we want to measure histograms for
1710 string operations. */
1712 gimple_stringops_values_to_profile (gimple stmt
, histogram_values
*values
)
1719 if (gimple_code (stmt
) != GIMPLE_CALL
)
1721 fndecl
= gimple_call_fndecl (stmt
);
1725 if (!interesting_stringop_to_profile_p (fndecl
, stmt
, &size_arg
))
1728 dest
= gimple_call_arg (stmt
, 0);
1729 blck_size
= gimple_call_arg (stmt
, size_arg
);
1731 if (TREE_CODE (blck_size
) != INTEGER_CST
)
1733 VEC_safe_push (histogram_value
, heap
, *values
,
1734 gimple_alloc_histogram_value (cfun
, HIST_TYPE_SINGLE_VALUE
,
1736 VEC_safe_push (histogram_value
, heap
, *values
,
1737 gimple_alloc_histogram_value (cfun
, HIST_TYPE_AVERAGE
,
1740 if (TREE_CODE (blck_size
) != INTEGER_CST
)
1741 VEC_safe_push (histogram_value
, heap
, *values
,
1742 gimple_alloc_histogram_value (cfun
, HIST_TYPE_IOR
,
1746 /* Find values inside STMT for that we want to measure histograms and adds
1747 them to list VALUES. */
1750 gimple_values_to_profile (gimple stmt
, histogram_values
*values
)
1752 if (flag_value_profile_transformations
)
1754 gimple_divmod_values_to_profile (stmt
, values
);
1755 gimple_stringops_values_to_profile (stmt
, values
);
1756 gimple_indirect_call_to_profile (stmt
, values
);
1761 gimple_find_values_to_profile (histogram_values
*values
)
1764 gimple_stmt_iterator gsi
;
1766 histogram_value hist
= NULL
;
1770 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1771 gimple_values_to_profile (gsi_stmt (gsi
), values
);
1773 FOR_EACH_VEC_ELT (histogram_value
, *values
, i
, hist
)
1777 case HIST_TYPE_INTERVAL
:
1778 hist
->n_counters
= hist
->hdata
.intvl
.steps
+ 2;
1781 case HIST_TYPE_POW2
:
1782 hist
->n_counters
= 2;
1785 case HIST_TYPE_SINGLE_VALUE
:
1786 hist
->n_counters
= 3;
1789 case HIST_TYPE_CONST_DELTA
:
1790 hist
->n_counters
= 4;
1793 case HIST_TYPE_INDIR_CALL
:
1794 hist
->n_counters
= 3;
1797 case HIST_TYPE_AVERAGE
:
1798 hist
->n_counters
= 2;
1802 hist
->n_counters
= 1;
1810 fprintf (dump_file
, "Stmt ");
1811 print_gimple_stmt (dump_file
, hist
->hvalue
.stmt
, 0, TDF_SLIM
);
1812 dump_histogram_value (dump_file
, hist
);