/* Transformations based on profile information for values.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "value-prof.h"
#include "insn-config.h"
#include "tree-flow.h"
#include "tree-flow-inline.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-pass.h"
#include "pointer-set.h"
/* In this file value profile based optimizations are placed.  Currently the
   following optimizations are implemented (for more detailed descriptions
   see comments at value_profile_transformations):

   1) Division/modulo specialization.  Provided that we can determine that the
      operands of the division have some special properties, we may use it to
      produce more effective code.

   2) Speculative prefetching.  If we are able to determine that the difference
      between addresses accessed by a memory reference is usually constant, we
      may add the prefetch instructions.
      FIXME: This transformation was removed together with RTL based value
      profiles.

   3) Indirect/virtual call specialization.  If we can determine the most
      common callee of an indirect/virtual call, we can use this information
      to improve code effectiveness (it is especially useful for the inliner).

   Every such optimization should add its requirements for profiled values to
   insn_values_to_profile function.  This function is called from branch_prob
   in profile.c and the requested values are instrumented by it in the first
   compilation with -fprofile-arcs.  The optimization may then read the
   gathered data in the second compilation with -fbranch-probabilities.

   The measured data is pointed to from the histograms
   field of the statement annotation of the instrumented insns.  It is
   kept as a linked list of struct histogram_value_t's, which contain the
   same information as above.  */
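/* As an illustrative sketch only (this is not the exact GIMPLE the pass
   emits), the division/modulo specialization in 1) turns

     x = a % b;

   where the profile says B is almost always the constant N, into

     if (b == N)
       x = a % N;
     else
       x = a % b;

   so the common case can be expanded into a cheap instruction sequence,
   while the cold path keeps the generic operation.  */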
static tree gimple_divmod_fixed_value (gimple, tree, int, gcov_type, gcov_type);
static tree gimple_mod_pow2 (gimple, int, gcov_type, gcov_type);
static tree gimple_mod_subtract (gimple, int, int, int, gcov_type, gcov_type,
				 gcov_type);
static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
static bool gimple_stringops_transform (gimple_stmt_iterator *);
static bool gimple_ic_transform (gimple);
/* Allocate histogram value.  */

static histogram_value
gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
			      enum hist_type type, gimple stmt, tree value)
{
  histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
  hist->hvalue.value = value;
  hist->hvalue.stmt = stmt;
  hist->type = type;
  return hist;
}
/* Hash value for histogram.  */

static hashval_t
histogram_hash (const void *x)
{
  return htab_hash_pointer (((const_histogram_value) x)->hvalue.stmt);
}
/* Return nonzero if the statement of histogram_value X is the same as
   the statement Y.  */

static int
histogram_eq (const void *x, const void *y)
{
  return ((const_histogram_value) x)->hvalue.stmt == (const_gimple) y;
}
/* Set histogram for STMT.  */

static void
set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
{
  void **loc;
  if (!hist && !VALUE_HISTOGRAMS (fun))
    return;
  if (!VALUE_HISTOGRAMS (fun))
    VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
					  histogram_eq, NULL);
  loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
				  htab_hash_pointer (stmt),
				  hist ? INSERT : NO_INSERT);
  if (!hist)
    {
      if (loc)
	htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
      return;
    }
  *loc = hist;
}
/* Get histogram list for STMT.  */

histogram_value
gimple_histogram_value (struct function *fun, gimple stmt)
{
  if (!VALUE_HISTOGRAMS (fun))
    return NULL;
  return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
						htab_hash_pointer (stmt));
}
/* Add histogram for STMT.  */

void
gimple_add_histogram_value (struct function *fun, gimple stmt,
			    histogram_value hist)
{
  hist->hvalue.next = gimple_histogram_value (fun, stmt);
  set_histogram_value (fun, stmt, hist);
}
/* Remove histogram HIST from STMT's histogram list.  */

void
gimple_remove_histogram_value (struct function *fun, gimple stmt,
			       histogram_value hist)
{
  histogram_value hist2 = gimple_histogram_value (fun, stmt);
  if (hist == hist2)
    {
      set_histogram_value (fun, stmt, hist->hvalue.next);
    }
  else
    {
      while (hist2->hvalue.next != hist)
	hist2 = hist2->hvalue.next;
      hist2->hvalue.next = hist->hvalue.next;
    }
  free (hist->hvalue.counters);
#ifdef ENABLE_CHECKING
  memset (hist, 0xab, sizeof (*hist));
#endif
  free (hist);
}
/* Lookup histogram of type TYPE in the STMT.  */

histogram_value
gimple_histogram_value_of_type (struct function *fun, gimple stmt,
				enum hist_type type)
{
  histogram_value hist;
  for (hist = gimple_histogram_value (fun, stmt); hist;
       hist = hist->hvalue.next)
    if (hist->type == type)
      return hist;
  return NULL;
}
/* Dump information about HIST to DUMP_FILE.  */

static void
dump_histogram_value (FILE *dump_file, histogram_value hist)
{
  switch (hist->type)
    {
    case HIST_TYPE_INTERVAL:
      fprintf (dump_file, "Interval counter range %d -- %d",
	       hist->hdata.intvl.int_start,
	       (hist->hdata.intvl.int_start
		+ hist->hdata.intvl.steps - 1));
      if (hist->hvalue.counters)
	{
	  unsigned int i;
	  fprintf (dump_file, " [");
	  for (i = 0; i < hist->hdata.intvl.steps; i++)
	    fprintf (dump_file, " %d:"HOST_WIDEST_INT_PRINT_DEC,
		     hist->hdata.intvl.int_start + i,
		     (HOST_WIDEST_INT) hist->hvalue.counters[i]);
	  fprintf (dump_file, " ] outside range:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[i]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_POW2:
      fprintf (dump_file, "Pow2 counter ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "pow2:"HOST_WIDEST_INT_PRINT_DEC
		   " nonpow2:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0],
		   (HOST_WIDEST_INT) hist->hvalue.counters[1]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_SINGLE_VALUE:
      fprintf (dump_file, "Single value ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		   " match:"HOST_WIDEST_INT_PRINT_DEC
		   " wrong:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0],
		   (HOST_WIDEST_INT) hist->hvalue.counters[1],
		   (HOST_WIDEST_INT) hist->hvalue.counters[2]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_AVERAGE:
      fprintf (dump_file, "Average value ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "sum:"HOST_WIDEST_INT_PRINT_DEC
		   " times:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0],
		   (HOST_WIDEST_INT) hist->hvalue.counters[1]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_IOR:
      fprintf (dump_file, "IOR value ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "ior:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_CONST_DELTA:
      fprintf (dump_file, "Constant delta ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		   " match:"HOST_WIDEST_INT_PRINT_DEC
		   " wrong:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0],
		   (HOST_WIDEST_INT) hist->hvalue.counters[1],
		   (HOST_WIDEST_INT) hist->hvalue.counters[2]);
	}
      fprintf (dump_file, ".\n");
      break;

    case HIST_TYPE_INDIR_CALL:
      fprintf (dump_file, "Indirect call ");
      if (hist->hvalue.counters)
	{
	  fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
		   " match:"HOST_WIDEST_INT_PRINT_DEC
		   " all:"HOST_WIDEST_INT_PRINT_DEC,
		   (HOST_WIDEST_INT) hist->hvalue.counters[0],
		   (HOST_WIDEST_INT) hist->hvalue.counters[1],
		   (HOST_WIDEST_INT) hist->hvalue.counters[2]);
	}
      fprintf (dump_file, ".\n");
      break;
    }
}
/* Dump all histograms attached to STMT to DUMP_FILE.  */

void
dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
{
  histogram_value hist;
  for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
    dump_histogram_value (dump_file, hist);
}
/* Remove all histograms associated with STMT.  */

void
gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
{
  histogram_value val;
  while ((val = gimple_histogram_value (fun, stmt)) != NULL)
    gimple_remove_histogram_value (fun, stmt, val);
}
/* Duplicate all histograms associated with OSTMT to STMT.  */

void
gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
				  struct function *ofun, gimple ostmt)
{
  histogram_value val;
  for (val = gimple_histogram_value (ofun, ostmt); val != NULL;
       val = val->hvalue.next)
    {
      histogram_value new_val
	= gimple_alloc_histogram_value (fun, val->type, NULL, NULL);
      memcpy (new_val, val, sizeof (*val));
      new_val->hvalue.stmt = stmt;
      new_val->hvalue.counters
	= XNEWVAR (gcov_type,
		   sizeof (*new_val->hvalue.counters) * new_val->n_counters);
      memcpy (new_val->hvalue.counters, val->hvalue.counters,
	      sizeof (*new_val->hvalue.counters) * new_val->n_counters);
      gimple_add_histogram_value (fun, stmt, new_val);
    }
}
/* Move all histograms associated with OSTMT to STMT.  */

void
gimple_move_stmt_histograms (struct function *fun, gimple stmt, gimple ostmt)
{
  histogram_value val = gimple_histogram_value (fun, ostmt);
  if (val)
    {
      /* The following three statements can't be reordered,
	 because histogram hashtab relies on stmt field value
	 for finding the exact slot.  */
      set_histogram_value (fun, ostmt, NULL);
      for (; val != NULL; val = val->hvalue.next)
	val->hvalue.stmt = stmt;
      set_histogram_value (fun, stmt, val);
    }
}
static bool error_found = false;

/* Helper function for verify_histograms.  For each histogram reachable via
   htab walk verify that it was reached via statement walk.  */

static int
visit_hist (void **slot, void *data)
{
  struct pointer_set_t *visited = (struct pointer_set_t *) data;
  histogram_value hist = *(histogram_value *) slot;
  if (!pointer_set_contains (visited, hist))
    {
      error ("dead histogram");
      dump_histogram_value (stderr, hist);
      debug_gimple_stmt (hist->hvalue.stmt);
      error_found = true;
    }
  return 1;
}
/* Verify sanity of the histograms.  */

void
verify_histograms (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  histogram_value hist;
  struct pointer_set_t *visited_hists;

  error_found = false;
  visited_hists = pointer_set_create ();
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);

	for (hist = gimple_histogram_value (cfun, stmt); hist;
	     hist = hist->hvalue.next)
	  {
	    if (hist->hvalue.stmt != stmt)
	      {
		error ("Histogram value statement does not correspond to "
		       "the statement it is associated with");
		debug_gimple_stmt (stmt);
		dump_histogram_value (stderr, hist);
		error_found = true;
	      }
	    pointer_set_insert (visited_hists, hist);
	  }
      }
  if (VALUE_HISTOGRAMS (cfun))
    htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, visited_hists);
  pointer_set_destroy (visited_hists);
  if (error_found)
    internal_error ("verify_histograms failed");
}
/* Helper function for free_histograms.  Free the histogram reachable from
   htab slot SLOT.  */

static int
free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
{
  histogram_value hist = *(histogram_value *) slot;
  free (hist->hvalue.counters);
#ifdef ENABLE_CHECKING
  memset (hist, 0xab, sizeof (*hist));
#endif
  free (hist);
  return 1;
}

/* Free all histograms of the current function.  */

void
free_histograms (void)
{
  if (VALUE_HISTOGRAMS (cfun))
    {
      htab_traverse (VALUE_HISTOGRAMS (cfun), free_hist, NULL);
      htab_delete (VALUE_HISTOGRAMS (cfun));
      VALUE_HISTOGRAMS (cfun) = NULL;
    }
}
/* The overall number of invocations of the counter should match
   execution count of basic block.  Report it as error rather than
   internal error as it might mean that user has misused the profile
   somehow.  */

static bool
check_counter (gimple stmt, const char * name,
	       gcov_type *count, gcov_type *all, gcov_type bb_count)
{
  if (*all != bb_count || *count > *all)
    {
      location_t locus;
      locus = (stmt != NULL)
	      ? gimple_location (stmt)
	      : DECL_SOURCE_LOCATION (current_function_decl);
      if (flag_profile_correction)
	{
	  inform (locus, "correcting inconsistent value profile: "
		  "%s profiler overall count (%d) does not match BB count "
		  "(%d)", name, (int)*all, (int)bb_count);
	  *all = bb_count;
	  if (*count > *all)
	    *count = *all;
	  return false;
	}
      else
	{
	  error_at (locus, "corrupted value profile: %s "
		    "profile counter (%d out of %d) inconsistent with "
		    "basic-block count (%d)",
		    name, (int) *count, (int) *all, (int) bb_count);
	  return true;
	}
    }

  return false;
}
/* GIMPLE based transformations. */

bool
gimple_value_profile_transformations (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool changed = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  histogram_value th = gimple_histogram_value (cfun, stmt);
	  if (!th)
	    continue;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Trying transformations on stmt ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      dump_histograms_for_stmt (cfun, dump_file, stmt);
	    }

	  /* Transformations:  */
	  /* The order of things in this conditional controls which
	     transformation is used when more than one is applicable.  */
	  /* It is expected that any code added by the transformations
	     will be added before the current statement, and that the
	     current statement remain valid (although possibly
	     modified) upon return.  */
	  if (flag_value_profile_transformations
	      && (gimple_mod_subtract_transform (&gsi)
		  || gimple_divmod_fixed_value_transform (&gsi)
		  || gimple_mod_pow2_value_transform (&gsi)
		  || gimple_stringops_transform (&gsi)
		  || gimple_ic_transform (stmt)))
	    {
	      stmt = gsi_stmt (gsi);
	      changed = true;
	      /* Original statement may no longer be in the same block. */
	      if (bb != gimple_bb (stmt))
		{
		  bb = gimple_bb (stmt);
		  gsi = gsi_for_stmt (stmt);
		}
	    }
	}
    }

  if (changed)
    counts_to_freqs ();

  return changed;
}
/* Generate code for transformation 1 (with parent gimple assignment
   STMT and probability of taking the optimal path PROB, which is
   equivalent to COUNT/ALL within roundoff error).  This generates the
   result into a temp and returns the temp; it does not replace or
   alter the original STMT.  */
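/* Illustrative sketch only (assumed shape of the emitted code, using the
   temporaries created below): for STMT "lhs = op1 / op2" and the profiled
   constant VALUE, the generated GIMPLE is roughly

     tmp0 = (optype) VALUE;
     tmp1 = op2;
     if (tmp1 == tmp0)
       tmp2 = op1 / tmp0;
     else
       tmp2 = op1 / tmp1;

   and tmp2 is returned; the caller then rewrites the RHS of STMT.  */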
static tree
gimple_divmod_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
			   gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp0, tmp1, tmp2, tmpv;
  gimple bb1end, bb2end, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;

  gcc_assert (is_gimple_assign (stmt)
	      && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
		  || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  tmpv = create_tmp_reg (optype, "PROF");
  tmp0 = make_ssa_name (tmpv, NULL);
  tmp1 = make_ssa_name (tmpv, NULL);
  stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
  SSA_NAME_DEF_STMT (tmp0) = stmt1;
  stmt2 = gimple_build_assign (tmp1, op2);
  SSA_NAME_DEF_STMT (tmp1) = stmt2;
  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  tmp2 = make_rename_temp (optype, "PROF");
  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
					op1, tmp0);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb2end = stmt1;

  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
					op1, op2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG. */
  /* Edge e23 connects bb2 to bb3, etc. */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;

  return tmp2;
}
/* Do transform 1) on INSN if applicable.  */

static bool
gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type val, count, all;
  tree result, value, tree_val;
  gcov_type prob;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt,
					      HIST_TYPE_SINGLE_VALUE);
  if (!histogram)
    return false;

  value = histogram->hvalue.value;
  val = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];
  all = histogram->hvalue.counters[2];
  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* We require that count is at least half of all; this means
     that for the transformation to fire the value must be constant
     at least 50% of time (and 75% gives the guarantee of usage).  */
  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
      || 2 * count < all
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;

  /* Compute probability of taking the optimal path.  */
  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;

  tree_val = build_int_cst_wide (get_gcov_type (),
				 (unsigned HOST_WIDE_INT) val,
				 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
  result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);

  if (dump_file)
    {
      fprintf (dump_file, "Div/mod by constant ");
      print_generic_expr (dump_file, value, TDF_SLIM);
      fprintf (dump_file, "=");
      print_generic_expr (dump_file, tree_val, TDF_SLIM);
      fprintf (dump_file, " transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  gimple_assign_set_rhs_from_tree (si, result);
  update_stmt (gsi_stmt (*si));

  return true;
}
/* Generate code for transformation 2 (with parent gimple assign STMT and
   probability of taking the optimal path PROB, which is equivalent to
   COUNT/ALL within roundoff error).  This generates the result into a temp
   and returns the temp; it does not replace or alter the original STMT.  */
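/* Illustrative sketch only (assumed shape of the emitted code): for STMT
   "lhs = op1 % op2" where op2 is usually a power of 2, the generated
   GIMPLE is roughly

     tmp2 = op2 + -1;
     tmp3 = tmp2 & op2;
     if (tmp3 != 0)
       result = op1 % op2;
     else
       result = op1 & tmp2;

   and the temporary holding the result is returned.  */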
static tree
gimple_mod_pow2 (gimple stmt, int prob, gcov_type count, gcov_type all)
{
  gimple stmt1, stmt2, stmt3, stmt4;
  tree tmp2, tmp3, tmpv;
  gimple bb1end, bb2end, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e13, e23, e24, e34;
  gimple_stmt_iterator gsi;
  tree result;

  gcc_assert (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  result = make_rename_temp (optype, "PROF");
  tmpv = create_tmp_var (optype, "PROF");
  tmp2 = make_ssa_name (tmpv, NULL);
  tmp3 = make_ssa_name (tmpv, NULL);
  stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, tmp2, op2,
					build_int_cst (optype, -1));
  SSA_NAME_DEF_STMT (tmp2) = stmt2;
  stmt3 = gimple_build_assign_with_ops (BIT_AND_EXPR, tmp3, tmp2, op2);
  SSA_NAME_DEF_STMT (tmp3) = stmt3;
  stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
			     NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
  bb1end = stmt4;

  /* tmp2 == op2-1 inherited from previous block. */
  stmt1 = gimple_build_assign_with_ops (BIT_AND_EXPR, result, op1, tmp2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb2end = stmt1;

  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
					op1, op2);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG. */
  /* Edge e23 connects bb2 to bb3, etc. */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = count;
  e23 = split_block (bb2, bb2end);
  bb3 = e23->dest;
  bb3->count = all - count;
  e34 = split_block (bb3, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = prob;
  e12->count = count;

  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
  e13->probability = REG_BR_PROB_BASE - prob;
  e13->count = all - count;

  remove_edge (e23);

  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
  e24->probability = REG_BR_PROB_BASE;
  e24->count = count;

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count;

  return result;
}
/* Do transform 2) on INSN if applicable.  */

static bool
gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type count, wrong_values, all;
  tree lhs_type, result, value;
  gcov_type prob;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
  if (!INTEGRAL_TYPE_P (lhs_type))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
  if (!histogram)
    return false;

  value = histogram->hvalue.value;
  wrong_values = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];

  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* We require that we hit a power of 2 at least half of all evaluations. */
  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
      || count < wrong_values
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  if (dump_file)
    {
      fprintf (dump_file, "Mod power of 2 transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Compute probability of taking the optimal path.  */
  all = count + wrong_values;

  if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
    return false;

  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;

  result = gimple_mod_pow2 (stmt, prob, count, all);

  gimple_assign_set_rhs_from_tree (si, result);
  update_stmt (gsi_stmt (*si));

  return true;
}
/* Generate code for transformations 3 and 4 (with parent gimple assign STMT,
   and NCOUNTS the number of cases to support.  Currently only NCOUNTS==0 or 1
   is supported and this is built into this interface.  The probabilities of
   taking the optimal paths are PROB1 and PROB2, which are equivalent to
   COUNT1/ALL and COUNT2/ALL respectively within roundoff error).  This
   generates the result into a temp and returns the temp; it does not replace
   or alter the original STMT.  */
/* FIXME: Generalize the interface to handle NCOUNTS > 1.  */
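/* Illustrative sketch only (assumed shape of the emitted code for
   NCOUNTS == 1): for STMT "lhs = op1 % op2" where op1 is usually only
   slightly larger than op2, the generated GIMPLE is roughly

     result = op1;
     tmp1 = op2;
     if (result >= tmp1)
       {
	 result = result - tmp1;
	 if (result >= tmp1)
	   result = op1 % op2;
       }

   (whenever a test finds result < tmp1, result already holds op1 % op2)
   and the temporary holding the result is returned.  */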
static tree
gimple_mod_subtract (gimple stmt, int prob1, int prob2, int ncounts,
		     gcov_type count1, gcov_type count2, gcov_type all)
{
  gimple stmt1, stmt2, stmt3;
  tree tmp1, result;
  gimple bb1end, bb2end = NULL, bb3end;
  basic_block bb, bb2, bb3, bb4;
  tree optype, op1, op2;
  edge e12, e23 = 0, e24, e34, e14;
  gimple_stmt_iterator gsi;

  gcc_assert (is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);

  optype = TREE_TYPE (gimple_assign_lhs (stmt));
  op1 = gimple_assign_rhs1 (stmt);
  op2 = gimple_assign_rhs2 (stmt);

  bb = gimple_bb (stmt);
  gsi = gsi_for_stmt (stmt);

  result = make_rename_temp (optype, "PROF");
  tmp1 = make_ssa_name (create_tmp_var (optype, "PROF"), NULL);
  stmt1 = gimple_build_assign (result, op1);
  stmt2 = gimple_build_assign (tmp1, op2);
  SSA_NAME_DEF_STMT (tmp1) = stmt2;
  stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
  bb1end = stmt3;

  if (ncounts)	/* Assumed to be 0 or 1 */
    {
      stmt1 = gimple_build_assign_with_ops (MINUS_EXPR, result, result, tmp1);
      stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
      gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
      gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
      bb2end = stmt2;
    }

  /* Fallback case. */
  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
					op1, tmp1);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  bb3end = stmt1;

  /* Fix CFG. */
  /* Edge e23 connects bb2 to bb3, etc. */
  /* However block 3 is optional; if it is not there, references
     to 3 really refer to block 2. */
  e12 = split_block (bb, bb1end);
  bb2 = e12->dest;
  bb2->count = all - count1;

  if (ncounts)	/* Assumed to be 0 or 1.  */
    {
      e23 = split_block (bb2, bb2end);
      bb3 = e23->dest;
      bb3->count = all - count1 - count2;
    }

  e34 = split_block (ncounts ? bb3 : bb2, bb3end);
  bb4 = e34->dest;
  bb4->count = all;

  e12->flags &= ~EDGE_FALLTHRU;
  e12->flags |= EDGE_FALSE_VALUE;
  e12->probability = REG_BR_PROB_BASE - prob1;
  e12->count = all - count1;

  e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
  e14->probability = prob1;
  e14->count = count1;

  if (ncounts)	/* Assumed to be 0 or 1.  */
    {
      e23->flags &= ~EDGE_FALLTHRU;
      e23->flags |= EDGE_FALSE_VALUE;
      e23->count = all - count1 - count2;
      e23->probability = REG_BR_PROB_BASE - prob2;

      e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
      e24->probability = prob2;
      e24->count = count2;
    }

  e34->probability = REG_BR_PROB_BASE;
  e34->count = all - count1 - count2;

  return result;
}
/* Do transforms 3) and 4) on the statement pointed-to by SI if applicable.  */

static bool
gimple_mod_subtract_transform (gimple_stmt_iterator *si)
{
  histogram_value histogram;
  enum tree_code code;
  gcov_type count, wrong_values, all;
  tree lhs_type, result;
  gcov_type prob1, prob2;
  unsigned int i, steps;
  gcov_type count1, count2;
  gimple stmt;

  stmt = gsi_stmt (*si);
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
  if (!INTEGRAL_TYPE_P (lhs_type))
    return false;

  code = gimple_assign_rhs_code (stmt);

  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
  if (!histogram)
    return false;

  all = 0;
  wrong_values = 0;
  for (i = 0; i < histogram->hdata.intvl.steps; i++)
    all += histogram->hvalue.counters[i];

  wrong_values += histogram->hvalue.counters[i];
  wrong_values += histogram->hvalue.counters[i+1];
  steps = histogram->hdata.intvl.steps;
  all += wrong_values;
  count1 = histogram->hvalue.counters[0];
  count2 = histogram->hvalue.counters[1];

  /* Compute probability of taking the optimal path.  */
  if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      return false;
    }

  if (flag_profile_correction && count1 + count2 > all)
    all = count1 + count2;

  gcc_assert (count1 + count2 <= all);

  /* We require that we use just subtractions in at least 50% of all
     evaluations.  */
  count = 0;
  for (i = 0; i < histogram->hdata.intvl.steps; i++)
    {
      count += histogram->hvalue.counters[i];
      if (count * 2 >= all)
	break;
    }
  if (i == steps
      || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;

  gimple_remove_histogram_value (cfun, stmt, histogram);
  if (dump_file)
    {
      fprintf (dump_file, "Mod subtract transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  /* Compute probability of taking the optimal path(s).  */
  if (all > 0)
    {
      prob1 = (count1 * REG_BR_PROB_BASE + all / 2) / all;
      prob2 = (count2 * REG_BR_PROB_BASE + all / 2) / all;
    }
  else
    {
      prob1 = prob2 = 0;
    }

  /* In practice, "steps" is always 2.  This interface reflects this,
     and will need to be changed if "steps" can change.  */
  result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);

  gimple_assign_set_rhs_from_tree (si, result);
  update_stmt (gsi_stmt (*si));

  return true;
}
static struct cgraph_node** pid_map = NULL;

/* Initialize map of pids (pid -> cgraph node) */

static void
init_pid_map (void)
{
  struct cgraph_node *n;

  if (pid_map != NULL)
    return;

  pid_map = XCNEWVEC (struct cgraph_node*, cgraph_max_pid);

  for (n = cgraph_nodes; n; n = n->next)
    if (n->pid != -1)
      pid_map [n->pid] = n;
}

/* Return cgraph node for function with pid */

static inline struct cgraph_node*
find_func_by_pid (int pid)
{
  init_pid_map ();

  return pid_map [pid];
}
/* Do transformation

  if (actual_callee_address == address_of_most_common_function/method)
    do direct call
  else
    old call
 */
static gimple
gimple_ic (gimple icall_stmt, struct cgraph_node *direct_call,
	   int prob, gcov_type count, gcov_type all)
{
  gimple dcall_stmt, load_stmt, cond_stmt;
  tree tmp0, tmp1, tmpv, tmp;
  basic_block cond_bb, dcall_bb, icall_bb, join_bb;
  tree optype = build_pointer_type (void_type_node);
  edge e_cd, e_ci, e_di, e_dj, e_ij;
  gimple_stmt_iterator gsi;
  int lp_nr;

  cond_bb = gimple_bb (icall_stmt);
  gsi = gsi_for_stmt (icall_stmt);

  tmpv = create_tmp_reg (optype, "PROF");
  tmp0 = make_ssa_name (tmpv, NULL);
  tmp1 = make_ssa_name (tmpv, NULL);
  tmp = unshare_expr (gimple_call_fn (icall_stmt));
  load_stmt = gimple_build_assign (tmp0, tmp);
  SSA_NAME_DEF_STMT (tmp0) = load_stmt;
  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);

  tmp = fold_convert (optype, build_addr (direct_call->decl,
					  current_function_decl));
  load_stmt = gimple_build_assign (tmp1, tmp);
  SSA_NAME_DEF_STMT (tmp1) = load_stmt;
  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);

  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);

  gimple_set_vdef (icall_stmt, NULL_TREE);
  gimple_set_vuse (icall_stmt, NULL_TREE);
  update_stmt (icall_stmt);
  dcall_stmt = gimple_copy (icall_stmt);
  gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
  gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);

  /* Fix CFG. */
  /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
  e_cd = split_block (cond_bb, cond_stmt);
  dcall_bb = e_cd->dest;
  dcall_bb->count = count;

  e_di = split_block (dcall_bb, dcall_stmt);
  icall_bb = e_di->dest;
  icall_bb->count = all - count;

  /* Do not disturb existing EH edges from the indirect call.  */
  if (!stmt_ends_bb_p (icall_stmt))
    e_ij = split_block (icall_bb, icall_stmt);
  else
    {
      e_ij = find_fallthru_edge (icall_bb->succs);
      e_ij->probability = REG_BR_PROB_BASE;
      e_ij->count = all - count;
      e_ij = single_pred_edge (split_edge (e_ij));
    }
  join_bb = e_ij->dest;
  join_bb->count = all;

  e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
  e_cd->probability = prob;
  e_cd->count = count;

  e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
  e_ci->probability = REG_BR_PROB_BASE - prob;
  e_ci->count = all - count;

  remove_edge (e_di);

  e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
  e_dj->probability = REG_BR_PROB_BASE;
  e_dj->count = count;

  e_ij->probability = REG_BR_PROB_BASE;
  e_ij->count = all - count;

  /* Insert PHI node for the call result if necessary.  */
  if (gimple_call_lhs (icall_stmt)
      && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME)
    {
      tree result = gimple_call_lhs (icall_stmt);
      gimple phi = create_phi_node (result, join_bb);
      SSA_NAME_DEF_STMT (result) = phi;
      gimple_call_set_lhs (icall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), icall_stmt));
      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
      gimple_call_set_lhs (dcall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), dcall_stmt));
      add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
    }

  /* Build an EH edge for the direct call if necessary.  */
  lp_nr = lookup_stmt_eh_lp (icall_stmt);
  if (lp_nr != 0
      && stmt_could_throw_p (dcall_stmt))
    {
      edge e_eh, e;
      edge_iterator ei;
      gimple_stmt_iterator psi;

      add_stmt_to_eh_lp (dcall_stmt, lp_nr);
      FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
	if (e_eh->flags & EDGE_EH)
	  {
	    e = make_edge (dcall_bb, e_eh->dest, EDGE_EH);
	    for (psi = gsi_start_phis (e_eh->dest);
		 !gsi_end_p (psi); gsi_next (&psi))
	      {
		gimple phi = gsi_stmt (psi);
		SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
			 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
	      }
	  }
    }

  return dcall_stmt;
}
/*
  For every checked indirect/virtual call determine if the most common pid of
  function/class method has a high enough probability (the code below requires
  more than 75% of all calls).  If yes, modify the call to invoke that
  function directly, guarded by an address comparison.  */
static bool
gimple_ic_transform (gimple stmt)
{
  histogram_value histogram;
  gcov_type val, count, all, bb_all;
  gcov_type prob;
  tree callee;
  gimple modify;
  struct cgraph_node *direct_call;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  callee = gimple_call_fn (stmt);

  if (TREE_CODE (callee) == FUNCTION_DECL)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
  if (!histogram)
    return false;

  val = histogram->hvalue.counters [0];
  count = histogram->hvalue.counters [1];
  all = histogram->hvalue.counters [2];
  gimple_remove_histogram_value (cfun, stmt, histogram);

  if (4 * count <= 3 * all)
    return false;

  bb_all = gimple_bb (stmt)->count;
  /* The order of CHECK_COUNTER calls is important -
     since check_counter can correct the third parameter
     and we want to make count <= all <= bb_all. */
  if ( check_counter (stmt, "ic", &all, &bb_all, bb_all)
      || check_counter (stmt, "ic", &count, &all, all))
    return false;

  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;
  direct_call = find_func_by_pid ((int)val);

  if (direct_call == NULL)
    return false;

  modify = gimple_ic (stmt, direct_call, prob, count, all);

  if (dump_file)
    {
      fprintf (dump_file, "Indirect call -> direct call ");
      print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
      fprintf (dump_file, "=> ");
      print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
      fprintf (dump_file, " transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, " to ");
      print_gimple_stmt (dump_file, modify, 0, TDF_SLIM);
      fprintf (dump_file, "hist->count "HOST_WIDEST_INT_PRINT_DEC
	       " hist->all "HOST_WIDEST_INT_PRINT_DEC"\n", count, all);
    }

  return true;
}
/* Return true if the stringop CALL with FNDECL shall be profiled.
   SIZE_ARG is set to the argument index of the size of the string
   operation.  */

static bool
interesting_stringop_to_profile_p (tree fndecl, gimple call, int *size_arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode != BUILT_IN_MEMCPY && fcode != BUILT_IN_MEMPCPY
      && fcode != BUILT_IN_MEMSET && fcode != BUILT_IN_BZERO)
    return false;

  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
      *size_arg = 2;
      return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				      INTEGER_TYPE, VOID_TYPE);
    case BUILT_IN_MEMSET:
      *size_arg = 2;
      return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				      INTEGER_TYPE, VOID_TYPE);
    case BUILT_IN_BZERO:
      *size_arg = 1;
      return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				      VOID_TYPE);
    default:
      gcc_unreachable ();
    }
}
/* Convert   stringop (..., vcall_size)
   into
   if (vcall_size == icall_size)
     stringop (..., icall_size);
   else
     stringop (..., vcall_size);
   assuming we'll propagate a true constant into ICALL_SIZE later.  */
static void
gimple_stringop_fixed_value (gimple vcall_stmt, tree icall_size, int prob,
			     gcov_type count, gcov_type all)
{
  gimple tmp_stmt, cond_stmt, icall_stmt;
  tree tmp0, tmp1, tmpv, vcall_size, optype;
  basic_block cond_bb, icall_bb, vcall_bb, join_bb;
  edge e_ci, e_cv, e_iv, e_ij, e_vj;
  gimple_stmt_iterator gsi;
  tree fndecl;
  int size_arg;

  fndecl = gimple_call_fndecl (vcall_stmt);
  if (!interesting_stringop_to_profile_p (fndecl, vcall_stmt, &size_arg))
    gcc_unreachable ();

  cond_bb = gimple_bb (vcall_stmt);
  gsi = gsi_for_stmt (vcall_stmt);

  vcall_size = gimple_call_arg (vcall_stmt, size_arg);
  optype = TREE_TYPE (vcall_size);

  tmpv = create_tmp_var (optype, "PROF");
  tmp0 = make_ssa_name (tmpv, NULL);
  tmp1 = make_ssa_name (tmpv, NULL);
  tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
  SSA_NAME_DEF_STMT (tmp0) = tmp_stmt;
  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);

  tmp_stmt = gimple_build_assign (tmp1, vcall_size);
  SSA_NAME_DEF_STMT (tmp1) = tmp_stmt;
  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);

  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);

  gimple_set_vdef (vcall_stmt, NULL);
  gimple_set_vuse (vcall_stmt, NULL);
  update_stmt (vcall_stmt);
  icall_stmt = gimple_copy (vcall_stmt);
  gimple_call_set_arg (icall_stmt, size_arg, icall_size);
  gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);

  /* Fix CFG. */
  /* Edge e_ci connects cond_bb to icall_bb, etc. */
  e_ci = split_block (cond_bb, cond_stmt);
  icall_bb = e_ci->dest;
  icall_bb->count = count;

  e_iv = split_block (icall_bb, icall_stmt);
  vcall_bb = e_iv->dest;
  vcall_bb->count = all - count;

  e_vj = split_block (vcall_bb, vcall_stmt);
  join_bb = e_vj->dest;
  join_bb->count = all;

  e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
  e_ci->probability = prob;
  e_ci->count = count;

  e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
  e_cv->probability = REG_BR_PROB_BASE - prob;
  e_cv->count = all - count;

  remove_edge (e_iv);

  e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
  e_ij->probability = REG_BR_PROB_BASE;
  e_ij->count = count;

  e_vj->probability = REG_BR_PROB_BASE;
  e_vj->count = all - count;

  /* Insert PHI node for the call result if necessary.  */
  if (gimple_call_lhs (vcall_stmt)
      && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
    {
      tree result = gimple_call_lhs (vcall_stmt);
      gimple phi = create_phi_node (result, join_bb);
      SSA_NAME_DEF_STMT (result) = phi;
      gimple_call_set_lhs (vcall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), vcall_stmt));
      add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
      gimple_call_set_lhs (icall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), icall_stmt));
      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
    }

  /* Because these are all string op builtins, they're all nothrow.  */
  gcc_assert (!stmt_could_throw_p (vcall_stmt));
  gcc_assert (!stmt_could_throw_p (icall_stmt));
}
/* Transform a string operation in the statement pointed-to by GSI when its
   size argument is usually a single constant value, using the profiled
   value to version the call.  */

static bool
gimple_stringops_transform (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree fndecl;
  tree blck_size;
  enum built_in_function fcode;
  histogram_value histogram;
  gcov_type count, all, val;
  tree dest, src;
  unsigned int dest_align, src_align;
  gcov_type prob;
  tree tree_val;
  int size_arg;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return false;
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (!interesting_stringop_to_profile_p (fndecl, stmt, &size_arg))
    return false;

  blck_size = gimple_call_arg (stmt, size_arg);
  if (TREE_CODE (blck_size) == INTEGER_CST)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
  if (!histogram)
    return false;
  val = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];
  all = histogram->hvalue.counters[2];
  gimple_remove_histogram_value (cfun, stmt, histogram);
  /* For the transformation to fire, the profiled size must have been the
     same value in roughly 80% or more of the executions.  */
  if ((6 * count / 5) < all || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;
  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;
  if (all > 0)
    prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  else
    prob = 0;
  dest = gimple_call_arg (stmt, 0);
  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
      src = gimple_call_arg (stmt, 1);
      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
	return false;
      break;
    case BUILT_IN_MEMSET:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				gimple_call_arg (stmt, 1),
				dest_align, true))
	return false;
      break;
    case BUILT_IN_BZERO:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				integer_zero_node,
				dest_align, true))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  tree_val = build_int_cst_wide (get_gcov_type (),
				 (unsigned HOST_WIDE_INT) val,
				 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
  if (dump_file)
    {
      fprintf (dump_file, "Single value %i stringop transformation on ",
	       (int) val);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }
  gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);

  return true;
}
/* Compute the expected block size and alignment for the string operation
   STMT from its AVERAGE and IOR histograms, storing them into
   *EXPECTED_SIZE and *EXPECTED_ALIGN.  */

void
stringop_block_profile (gimple stmt, unsigned int *expected_align,
			HOST_WIDE_INT *expected_size)
{
  histogram_value histogram;
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
  if (!histogram)
    *expected_size = -1;
  else if (!histogram->hvalue.counters[1])
    {
      *expected_size = -1;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  else
    {
      gcov_type size;
      size = ((histogram->hvalue.counters[0]
	      + histogram->hvalue.counters[1] / 2)
	       / histogram->hvalue.counters[1]);
      /* Even if we can hold bigger value in SIZE, INT_MAX
	 is safe "infinity" for code generation strategies.  */
      if (size > INT_MAX)
	size = INT_MAX;
      *expected_size = size;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
  if (!histogram)
    *expected_align = 0;
  else if (!histogram->hvalue.counters[0])
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      *expected_align = 0;
    }
  else
    {
      gcov_type count;
      int alignment;

      count = histogram->hvalue.counters[0];
      alignment = 1;
      while (!(count & alignment)
	     && (alignment * 2 * BITS_PER_UNIT))
	alignment <<= 1;
      *expected_align = alignment * BITS_PER_UNIT;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
}
/* Find values inside STMT for that we want to measure histograms for
   division/modulo optimization.  */

static void
gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
{
  tree lhs, divisor, op0, type;
  histogram_value hist;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return;

  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (type))
    return;

  switch (gimple_assign_rhs_code (stmt))
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      divisor = gimple_assign_rhs2 (stmt);
      op0 = gimple_assign_rhs1 (stmt);

      VEC_reserve (histogram_value, heap, *values, 3);

      if (is_gimple_reg (divisor))
	/* Check for the case where the divisor is the same value most
	   of the time.  */
	VEC_quick_push (histogram_value, *values,
			gimple_alloc_histogram_value (cfun,
						      HIST_TYPE_SINGLE_VALUE,
						      stmt, divisor));

      /* For mod, check whether it is not often a noop (or replaceable by
	 a few subtractions).  */
      if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type))
	{
	  tree val;
	  /* Check for a special case where the divisor is power of 2.  */
	  VEC_quick_push (histogram_value, *values,
			  gimple_alloc_histogram_value (cfun, HIST_TYPE_POW2,
							stmt, divisor));

	  val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
	  hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
					       stmt, val);
	  hist->hdata.intvl.int_start = 0;
	  hist->hdata.intvl.steps = 2;
	  VEC_quick_push (histogram_value, *values, hist);
	}
      return;

    default:
      return;
    }
}
/* Find calls inside STMT for that we want to measure histograms for
   indirect/virtual call optimization.  */

static void
gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
{
  tree callee;

  if (gimple_code (stmt) != GIMPLE_CALL
      || gimple_call_fndecl (stmt) != NULL_TREE)
    return;

  callee = gimple_call_fn (stmt);

  VEC_reserve (histogram_value, heap, *values, 3);

  VEC_quick_push (histogram_value, *values,
		  gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
						stmt, callee));
}
/* Find values inside STMT for that we want to measure histograms for
   string operations.  */

static void
gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
{
  tree fndecl;
  tree blck_size;
  tree dest;
  int size_arg;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return;

  if (!interesting_stringop_to_profile_p (fndecl, stmt, &size_arg))
    return;

  dest = gimple_call_arg (stmt, 0);
  blck_size = gimple_call_arg (stmt, size_arg);

  if (TREE_CODE (blck_size) != INTEGER_CST)
    {
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
						   stmt, blck_size));
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
						   stmt, blck_size));
    }
  if (TREE_CODE (blck_size) != INTEGER_CST)
    VEC_safe_push (histogram_value, heap, *values,
		   gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
						 stmt, dest));
}
/* Find values inside STMT for which we want to measure histograms and add
   them to list VALUES.  */

static void
gimple_values_to_profile (gimple stmt, histogram_values *values)
{
  if (flag_value_profile_transformations)
    {
      gimple_divmod_values_to_profile (stmt, values);
      gimple_stringops_values_to_profile (stmt, values);
      gimple_indirect_call_to_profile (stmt, values);
    }
}
/* Collect all values to profile in the current function and set the number
   of counters each histogram needs.  */

void
gimple_find_values_to_profile (histogram_values *values)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned i;
  histogram_value hist = NULL;

  *values = NULL;
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      gimple_values_to_profile (gsi_stmt (gsi), values);

  FOR_EACH_VEC_ELT (histogram_value, *values, i, hist)
    {
      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  hist->n_counters = hist->hdata.intvl.steps + 2;
	  break;

	case HIST_TYPE_POW2:
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  hist->n_counters = 4;
	  break;

	case HIST_TYPE_INDIR_CALL:
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_AVERAGE:
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_IOR:
	  hist->n_counters = 1;
	  break;

	default:
	  gcc_unreachable ();
	}
      if (dump_file)
	{
	  fprintf (dump_file, "Stmt ");
	  print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
	  dump_histogram_value (dump_file, hist);
	}
    }
}