1 /* Transformations based on profile information for values.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "value-prof.h"
32 #include "insn-config.h"
37 #include "tree-flow.h"
38 #include "tree-flow-inline.h"
39 #include "diagnostic.h"
40 #include "tree-pretty-print.h"
41 #include "gimple-pretty-print.h"
47 #include "tree-pass.h"
48 #include "pointer-set.h"
51 /* In this file value profile based optimizations are placed. Currently the
52 following optimizations are implemented (for more detailed descriptions
53 see comments at value_profile_transformations):
55 1) Division/modulo specialization. Provided that we can determine that the
56 operands of the division have some special properties, we may use it to
57 produce more effective code.
58 2) Speculative prefetching. If we are able to determine that the difference
59 between addresses accessed by a memory reference is usually constant, we
60 may add the prefetch instructions.
61 FIXME: This transformation was removed together with RTL based value
64 3) Indirect/virtual call specialization. If we can determine most
65 common function callee in indirect/virtual call. We can use this
66 information to improve code effectiveness (especially info for
69 Every such optimization should add its requirements for profiled values to
70 insn_values_to_profile function. This function is called from branch_prob
71 in profile.c and the requested values are instrumented by it in the first
72 compilation with -fprofile-arcs. The optimization may then read the
73 gathered data in the second compilation with -fbranch-probabilities.
75 The measured data is pointed to from the histograms
76 field of the statement annotation of the instrumented insns. It is
77 kept as a linked list of struct histogram_value_t's, which contain the
78 same information as above. */
81 static tree
gimple_divmod_fixed_value (gimple
, tree
, int, gcov_type
, gcov_type
);
82 static tree
gimple_mod_pow2 (gimple
, int, gcov_type
, gcov_type
);
83 static tree
gimple_mod_subtract (gimple
, int, int, int, gcov_type
, gcov_type
,
85 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator
*);
86 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator
*);
87 static bool gimple_mod_subtract_transform (gimple_stmt_iterator
*);
88 static bool gimple_stringops_transform (gimple_stmt_iterator
*);
89 static bool gimple_ic_transform (gimple
);
91 /* Allocate histogram value. */
93 static histogram_value
94 gimple_alloc_histogram_value (struct function
*fun ATTRIBUTE_UNUSED
,
95 enum hist_type type
, gimple stmt
, tree value
)
97 histogram_value hist
= (histogram_value
) xcalloc (1, sizeof (*hist
));
98 hist
->hvalue
.value
= value
;
99 hist
->hvalue
.stmt
= stmt
;
104 /* Hash value for histogram. */
107 histogram_hash (const void *x
)
109 return htab_hash_pointer (((const_histogram_value
)x
)->hvalue
.stmt
);
112 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
115 histogram_eq (const void *x
, const void *y
)
117 return ((const_histogram_value
) x
)->hvalue
.stmt
== (const_gimple
) y
;
120 /* Set histogram for STMT. */
123 set_histogram_value (struct function
*fun
, gimple stmt
, histogram_value hist
)
126 if (!hist
&& !VALUE_HISTOGRAMS (fun
))
128 if (!VALUE_HISTOGRAMS (fun
))
129 VALUE_HISTOGRAMS (fun
) = htab_create (1, histogram_hash
,
131 loc
= htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun
), stmt
,
132 htab_hash_pointer (stmt
),
133 hist
? INSERT
: NO_INSERT
);
137 htab_clear_slot (VALUE_HISTOGRAMS (fun
), loc
);
143 /* Get histogram list for STMT. */
146 gimple_histogram_value (struct function
*fun
, gimple stmt
)
148 if (!VALUE_HISTOGRAMS (fun
))
150 return (histogram_value
) htab_find_with_hash (VALUE_HISTOGRAMS (fun
), stmt
,
151 htab_hash_pointer (stmt
));
154 /* Add histogram for STMT. */
157 gimple_add_histogram_value (struct function
*fun
, gimple stmt
,
158 histogram_value hist
)
160 hist
->hvalue
.next
= gimple_histogram_value (fun
, stmt
);
161 set_histogram_value (fun
, stmt
, hist
);
165 /* Remove histogram HIST from STMT's histogram list. */
168 gimple_remove_histogram_value (struct function
*fun
, gimple stmt
,
169 histogram_value hist
)
171 histogram_value hist2
= gimple_histogram_value (fun
, stmt
);
174 set_histogram_value (fun
, stmt
, hist
->hvalue
.next
);
178 while (hist2
->hvalue
.next
!= hist
)
179 hist2
= hist2
->hvalue
.next
;
180 hist2
->hvalue
.next
= hist
->hvalue
.next
;
182 free (hist
->hvalue
.counters
);
183 #ifdef ENABLE_CHECKING
184 memset (hist
, 0xab, sizeof (*hist
));
190 /* Lookup histogram of type TYPE in the STMT. */
193 gimple_histogram_value_of_type (struct function
*fun
, gimple stmt
,
196 histogram_value hist
;
197 for (hist
= gimple_histogram_value (fun
, stmt
); hist
;
198 hist
= hist
->hvalue
.next
)
199 if (hist
->type
== type
)
204 /* Dump information about HIST to DUMP_FILE. */
207 dump_histogram_value (FILE *dump_file
, histogram_value hist
)
211 case HIST_TYPE_INTERVAL
:
212 fprintf (dump_file
, "Interval counter range %d -- %d",
213 hist
->hdata
.intvl
.int_start
,
214 (hist
->hdata
.intvl
.int_start
215 + hist
->hdata
.intvl
.steps
- 1));
216 if (hist
->hvalue
.counters
)
219 fprintf(dump_file
, " [");
220 for (i
= 0; i
< hist
->hdata
.intvl
.steps
; i
++)
221 fprintf (dump_file
, " %d:"HOST_WIDEST_INT_PRINT_DEC
,
222 hist
->hdata
.intvl
.int_start
+ i
,
223 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[i
]);
224 fprintf (dump_file
, " ] outside range:"HOST_WIDEST_INT_PRINT_DEC
,
225 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[i
]);
227 fprintf (dump_file
, ".\n");
231 fprintf (dump_file
, "Pow2 counter ");
232 if (hist
->hvalue
.counters
)
234 fprintf (dump_file
, "pow2:"HOST_WIDEST_INT_PRINT_DEC
235 " nonpow2:"HOST_WIDEST_INT_PRINT_DEC
,
236 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
237 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1]);
239 fprintf (dump_file
, ".\n");
242 case HIST_TYPE_SINGLE_VALUE
:
243 fprintf (dump_file
, "Single value ");
244 if (hist
->hvalue
.counters
)
246 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
247 " match:"HOST_WIDEST_INT_PRINT_DEC
248 " wrong:"HOST_WIDEST_INT_PRINT_DEC
,
249 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
250 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
251 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
253 fprintf (dump_file
, ".\n");
256 case HIST_TYPE_AVERAGE
:
257 fprintf (dump_file
, "Average value ");
258 if (hist
->hvalue
.counters
)
260 fprintf (dump_file
, "sum:"HOST_WIDEST_INT_PRINT_DEC
261 " times:"HOST_WIDEST_INT_PRINT_DEC
,
262 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
263 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1]);
265 fprintf (dump_file
, ".\n");
269 fprintf (dump_file
, "IOR value ");
270 if (hist
->hvalue
.counters
)
272 fprintf (dump_file
, "ior:"HOST_WIDEST_INT_PRINT_DEC
,
273 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0]);
275 fprintf (dump_file
, ".\n");
278 case HIST_TYPE_CONST_DELTA
:
279 fprintf (dump_file
, "Constant delta ");
280 if (hist
->hvalue
.counters
)
282 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
283 " match:"HOST_WIDEST_INT_PRINT_DEC
284 " wrong:"HOST_WIDEST_INT_PRINT_DEC
,
285 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
286 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
287 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
289 fprintf (dump_file
, ".\n");
291 case HIST_TYPE_INDIR_CALL
:
292 fprintf (dump_file
, "Indirect call ");
293 if (hist
->hvalue
.counters
)
295 fprintf (dump_file
, "value:"HOST_WIDEST_INT_PRINT_DEC
296 " match:"HOST_WIDEST_INT_PRINT_DEC
297 " all:"HOST_WIDEST_INT_PRINT_DEC
,
298 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[0],
299 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[1],
300 (HOST_WIDEST_INT
) hist
->hvalue
.counters
[2]);
302 fprintf (dump_file
, ".\n");
307 /* Dump all histograms attached to STMT to DUMP_FILE. */
310 dump_histograms_for_stmt (struct function
*fun
, FILE *dump_file
, gimple stmt
)
312 histogram_value hist
;
313 for (hist
= gimple_histogram_value (fun
, stmt
); hist
; hist
= hist
->hvalue
.next
)
314 dump_histogram_value (dump_file
, hist
);
317 /* Remove all histograms associated with STMT. */
320 gimple_remove_stmt_histograms (struct function
*fun
, gimple stmt
)
323 while ((val
= gimple_histogram_value (fun
, stmt
)) != NULL
)
324 gimple_remove_histogram_value (fun
, stmt
, val
);
327 /* Duplicate all histograms associates with OSTMT to STMT. */
330 gimple_duplicate_stmt_histograms (struct function
*fun
, gimple stmt
,
331 struct function
*ofun
, gimple ostmt
)
334 for (val
= gimple_histogram_value (ofun
, ostmt
); val
!= NULL
; val
= val
->hvalue
.next
)
336 histogram_value new_val
= gimple_alloc_histogram_value (fun
, val
->type
, NULL
, NULL
);
337 memcpy (new_val
, val
, sizeof (*val
));
338 new_val
->hvalue
.stmt
= stmt
;
339 new_val
->hvalue
.counters
= XNEWVAR (gcov_type
, sizeof (*new_val
->hvalue
.counters
) * new_val
->n_counters
);
340 memcpy (new_val
->hvalue
.counters
, val
->hvalue
.counters
, sizeof (*new_val
->hvalue
.counters
) * new_val
->n_counters
);
341 gimple_add_histogram_value (fun
, stmt
, new_val
);
346 /* Move all histograms associated with OSTMT to STMT. */
349 gimple_move_stmt_histograms (struct function
*fun
, gimple stmt
, gimple ostmt
)
351 histogram_value val
= gimple_histogram_value (fun
, ostmt
);
354 /* The following three statements can't be reordered,
355 because histogram hashtab relies on stmt field value
356 for finding the exact slot. */
357 set_histogram_value (fun
, ostmt
, NULL
);
358 for (; val
!= NULL
; val
= val
->hvalue
.next
)
359 val
->hvalue
.stmt
= stmt
;
360 set_histogram_value (fun
, stmt
, val
);
364 static bool error_found
= false;
366 /* Helper function for verify_histograms. For each histogram reachable via htab
367 walk verify that it was reached via statement walk. */
370 visit_hist (void **slot
, void *data
)
372 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
373 histogram_value hist
= *(histogram_value
*) slot
;
374 if (!pointer_set_contains (visited
, hist
))
376 error ("dead histogram");
377 dump_histogram_value (stderr
, hist
);
378 debug_gimple_stmt (hist
->hvalue
.stmt
);
385 /* Verify sanity of the histograms. */
388 verify_histograms (void)
391 gimple_stmt_iterator gsi
;
392 histogram_value hist
;
393 struct pointer_set_t
*visited_hists
;
396 visited_hists
= pointer_set_create ();
398 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
400 gimple stmt
= gsi_stmt (gsi
);
402 for (hist
= gimple_histogram_value (cfun
, stmt
); hist
;
403 hist
= hist
->hvalue
.next
)
405 if (hist
->hvalue
.stmt
!= stmt
)
407 error ("Histogram value statement does not correspond to "
408 "the statement it is associated with");
409 debug_gimple_stmt (stmt
);
410 dump_histogram_value (stderr
, hist
);
413 pointer_set_insert (visited_hists
, hist
);
416 if (VALUE_HISTOGRAMS (cfun
))
417 htab_traverse (VALUE_HISTOGRAMS (cfun
), visit_hist
, visited_hists
);
418 pointer_set_destroy (visited_hists
);
420 internal_error ("verify_histograms failed");
423 /* Helper function for verify_histograms. For each histogram reachable via htab
424 walk verify that it was reached via statement walk. */
427 free_hist (void **slot
, void *data ATTRIBUTE_UNUSED
)
429 histogram_value hist
= *(histogram_value
*) slot
;
430 free (hist
->hvalue
.counters
);
431 #ifdef ENABLE_CHECKING
432 memset (hist
, 0xab, sizeof (*hist
));
439 free_histograms (void)
441 if (VALUE_HISTOGRAMS (cfun
))
443 htab_traverse (VALUE_HISTOGRAMS (cfun
), free_hist
, NULL
);
444 htab_delete (VALUE_HISTOGRAMS (cfun
));
445 VALUE_HISTOGRAMS (cfun
) = NULL
;
450 /* The overall number of invocations of the counter should match
451 execution count of basic block. Report it as error rather than
452 internal error as it might mean that user has misused the profile
456 check_counter (gimple stmt
, const char * name
,
457 gcov_type
*count
, gcov_type
*all
, gcov_type bb_count
)
459 if (*all
!= bb_count
|| *count
> *all
)
462 locus
= (stmt
!= NULL
)
463 ? gimple_location (stmt
)
464 : DECL_SOURCE_LOCATION (current_function_decl
);
465 if (flag_profile_correction
)
467 inform (locus
, "correcting inconsistent value profile: "
468 "%s profiler overall count (%d) does not match BB count "
469 "(%d)", name
, (int)*all
, (int)bb_count
);
477 error_at (locus
, "corrupted value profile: %s "
478 "profile counter (%d out of %d) inconsistent with "
479 "basic-block count (%d)",
492 /* GIMPLE based transformations. */
495 gimple_value_profile_transformations (void)
498 gimple_stmt_iterator gsi
;
499 bool changed
= false;
503 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
505 gimple stmt
= gsi_stmt (gsi
);
506 histogram_value th
= gimple_histogram_value (cfun
, stmt
);
512 fprintf (dump_file
, "Trying transformations on stmt ");
513 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
514 dump_histograms_for_stmt (cfun
, dump_file
, stmt
);
517 /* Transformations: */
518 /* The order of things in this conditional controls which
519 transformation is used when more than one is applicable. */
520 /* It is expected that any code added by the transformations
521 will be added before the current statement, and that the
522 current statement remain valid (although possibly
523 modified) upon return. */
524 if (flag_value_profile_transformations
525 && (gimple_mod_subtract_transform (&gsi
)
526 || gimple_divmod_fixed_value_transform (&gsi
)
527 || gimple_mod_pow2_value_transform (&gsi
)
528 || gimple_stringops_transform (&gsi
)
529 || gimple_ic_transform (stmt
)))
531 stmt
= gsi_stmt (gsi
);
533 /* Original statement may no longer be in the same block. */
534 if (bb
!= gimple_bb (stmt
))
536 bb
= gimple_bb (stmt
);
537 gsi
= gsi_for_stmt (stmt
);
552 /* Generate code for transformation 1 (with parent gimple assignment
553 STMT and probability of taking the optimal path PROB, which is
554 equivalent to COUNT/ALL within roundoff error). This generates the
555 result into a temp and returns the temp; it does not replace or
556 alter the original STMT. */
559 gimple_divmod_fixed_value (gimple stmt
, tree value
, int prob
, gcov_type count
,
562 gimple stmt1
, stmt2
, stmt3
;
563 tree tmp0
, tmp1
, tmp2
, tmpv
;
564 gimple bb1end
, bb2end
, bb3end
;
565 basic_block bb
, bb2
, bb3
, bb4
;
566 tree optype
, op1
, op2
;
567 edge e12
, e13
, e23
, e24
, e34
;
568 gimple_stmt_iterator gsi
;
570 gcc_assert (is_gimple_assign (stmt
)
571 && (gimple_assign_rhs_code (stmt
) == TRUNC_DIV_EXPR
572 || gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
));
574 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
575 op1
= gimple_assign_rhs1 (stmt
);
576 op2
= gimple_assign_rhs2 (stmt
);
578 bb
= gimple_bb (stmt
);
579 gsi
= gsi_for_stmt (stmt
);
581 tmpv
= create_tmp_reg (optype
, "PROF");
582 tmp0
= make_ssa_name (tmpv
, NULL
);
583 tmp1
= make_ssa_name (tmpv
, NULL
);
584 stmt1
= gimple_build_assign (tmp0
, fold_convert (optype
, value
));
585 SSA_NAME_DEF_STMT (tmp0
) = stmt1
;
586 stmt2
= gimple_build_assign (tmp1
, op2
);
587 SSA_NAME_DEF_STMT (tmp1
) = stmt2
;
588 stmt3
= gimple_build_cond (NE_EXPR
, tmp1
, tmp0
, NULL_TREE
, NULL_TREE
);
589 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
590 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
591 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
594 tmp2
= make_rename_temp (optype
, "PROF");
595 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), tmp2
,
597 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
600 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), tmp2
,
602 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
606 /* Edge e23 connects bb2 to bb3, etc. */
607 e12
= split_block (bb
, bb1end
);
610 e23
= split_block (bb2
, bb2end
);
612 bb3
->count
= all
- count
;
613 e34
= split_block (bb3
, bb3end
);
617 e12
->flags
&= ~EDGE_FALLTHRU
;
618 e12
->flags
|= EDGE_FALSE_VALUE
;
619 e12
->probability
= prob
;
622 e13
= make_edge (bb
, bb3
, EDGE_TRUE_VALUE
);
623 e13
->probability
= REG_BR_PROB_BASE
- prob
;
624 e13
->count
= all
- count
;
628 e24
= make_edge (bb2
, bb4
, EDGE_FALLTHRU
);
629 e24
->probability
= REG_BR_PROB_BASE
;
632 e34
->probability
= REG_BR_PROB_BASE
;
633 e34
->count
= all
- count
;
639 /* Do transform 1) on INSN if applicable. */
642 gimple_divmod_fixed_value_transform (gimple_stmt_iterator
*si
)
644 histogram_value histogram
;
646 gcov_type val
, count
, all
;
647 tree result
, value
, tree_val
;
651 stmt
= gsi_stmt (*si
);
652 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
655 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt
))))
658 code
= gimple_assign_rhs_code (stmt
);
660 if (code
!= TRUNC_DIV_EXPR
&& code
!= TRUNC_MOD_EXPR
)
663 histogram
= gimple_histogram_value_of_type (cfun
, stmt
,
664 HIST_TYPE_SINGLE_VALUE
);
668 value
= histogram
->hvalue
.value
;
669 val
= histogram
->hvalue
.counters
[0];
670 count
= histogram
->hvalue
.counters
[1];
671 all
= histogram
->hvalue
.counters
[2];
672 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
674 /* We require that count is at least half of all; this means
675 that for the transformation to fire the value must be constant
676 at least 50% of time (and 75% gives the guarantee of usage). */
677 if (simple_cst_equal (gimple_assign_rhs2 (stmt
), value
) != 1
679 || optimize_bb_for_size_p (gimple_bb (stmt
)))
682 if (check_counter (stmt
, "value", &count
, &all
, gimple_bb (stmt
)->count
))
685 /* Compute probability of taking the optimal path. */
687 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
691 tree_val
= build_int_cst_wide (get_gcov_type (),
692 (unsigned HOST_WIDE_INT
) val
,
693 val
>> (HOST_BITS_PER_WIDE_INT
- 1) >> 1);
694 result
= gimple_divmod_fixed_value (stmt
, tree_val
, prob
, count
, all
);
698 fprintf (dump_file
, "Div/mod by constant ");
699 print_generic_expr (dump_file
, value
, TDF_SLIM
);
700 fprintf (dump_file
, "=");
701 print_generic_expr (dump_file
, tree_val
, TDF_SLIM
);
702 fprintf (dump_file
, " transformation on insn ");
703 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
706 gimple_assign_set_rhs_from_tree (si
, result
);
707 update_stmt (gsi_stmt (*si
));
712 /* Generate code for transformation 2 (with parent gimple assign STMT and
713 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
714 within roundoff error). This generates the result into a temp and returns
715 the temp; it does not replace or alter the original STMT. */
717 gimple_mod_pow2 (gimple stmt
, int prob
, gcov_type count
, gcov_type all
)
719 gimple stmt1
, stmt2
, stmt3
, stmt4
;
720 tree tmp2
, tmp3
, tmpv
;
721 gimple bb1end
, bb2end
, bb3end
;
722 basic_block bb
, bb2
, bb3
, bb4
;
723 tree optype
, op1
, op2
;
724 edge e12
, e13
, e23
, e24
, e34
;
725 gimple_stmt_iterator gsi
;
728 gcc_assert (is_gimple_assign (stmt
)
729 && gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
);
731 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
732 op1
= gimple_assign_rhs1 (stmt
);
733 op2
= gimple_assign_rhs2 (stmt
);
735 bb
= gimple_bb (stmt
);
736 gsi
= gsi_for_stmt (stmt
);
738 result
= make_rename_temp (optype
, "PROF");
739 tmpv
= create_tmp_var (optype
, "PROF");
740 tmp2
= make_ssa_name (tmpv
, NULL
);
741 tmp3
= make_ssa_name (tmpv
, NULL
);
742 stmt2
= gimple_build_assign_with_ops (PLUS_EXPR
, tmp2
, op2
,
743 build_int_cst (optype
, -1));
744 SSA_NAME_DEF_STMT (tmp2
) = stmt2
;
745 stmt3
= gimple_build_assign_with_ops (BIT_AND_EXPR
, tmp3
, tmp2
, op2
);
746 SSA_NAME_DEF_STMT (tmp3
) = stmt3
;
747 stmt4
= gimple_build_cond (NE_EXPR
, tmp3
, build_int_cst (optype
, 0),
748 NULL_TREE
, NULL_TREE
);
749 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
750 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
751 gsi_insert_before (&gsi
, stmt4
, GSI_SAME_STMT
);
754 /* tmp2 == op2-1 inherited from previous block. */
755 stmt1
= gimple_build_assign_with_ops (BIT_AND_EXPR
, result
, op1
, tmp2
);
756 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
759 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), result
,
761 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
765 /* Edge e23 connects bb2 to bb3, etc. */
766 e12
= split_block (bb
, bb1end
);
769 e23
= split_block (bb2
, bb2end
);
771 bb3
->count
= all
- count
;
772 e34
= split_block (bb3
, bb3end
);
776 e12
->flags
&= ~EDGE_FALLTHRU
;
777 e12
->flags
|= EDGE_FALSE_VALUE
;
778 e12
->probability
= prob
;
781 e13
= make_edge (bb
, bb3
, EDGE_TRUE_VALUE
);
782 e13
->probability
= REG_BR_PROB_BASE
- prob
;
783 e13
->count
= all
- count
;
787 e24
= make_edge (bb2
, bb4
, EDGE_FALLTHRU
);
788 e24
->probability
= REG_BR_PROB_BASE
;
791 e34
->probability
= REG_BR_PROB_BASE
;
792 e34
->count
= all
- count
;
797 /* Do transform 2) on INSN if applicable. */
799 gimple_mod_pow2_value_transform (gimple_stmt_iterator
*si
)
801 histogram_value histogram
;
803 gcov_type count
, wrong_values
, all
;
804 tree lhs_type
, result
, value
;
808 stmt
= gsi_stmt (*si
);
809 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
812 lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
813 if (!INTEGRAL_TYPE_P (lhs_type
))
816 code
= gimple_assign_rhs_code (stmt
);
818 if (code
!= TRUNC_MOD_EXPR
|| !TYPE_UNSIGNED (lhs_type
))
821 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_POW2
);
825 value
= histogram
->hvalue
.value
;
826 wrong_values
= histogram
->hvalue
.counters
[0];
827 count
= histogram
->hvalue
.counters
[1];
829 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
831 /* We require that we hit a power of 2 at least half of all evaluations. */
832 if (simple_cst_equal (gimple_assign_rhs2 (stmt
), value
) != 1
833 || count
< wrong_values
834 || optimize_bb_for_size_p (gimple_bb (stmt
)))
839 fprintf (dump_file
, "Mod power of 2 transformation on insn ");
840 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
843 /* Compute probability of taking the optimal path. */
844 all
= count
+ wrong_values
;
846 if (check_counter (stmt
, "pow2", &count
, &all
, gimple_bb (stmt
)->count
))
850 prob
= (count
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
854 result
= gimple_mod_pow2 (stmt
, prob
, count
, all
);
856 gimple_assign_set_rhs_from_tree (si
, result
);
857 update_stmt (gsi_stmt (*si
));
862 /* Generate code for transformations 3 and 4 (with parent gimple assign STMT, and
863 NCOUNTS the number of cases to support. Currently only NCOUNTS==0 or 1 is
864 supported and this is built into this interface. The probabilities of taking
865 the optimal paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
866 COUNT2/ALL respectively within roundoff error). This generates the
867 result into a temp and returns the temp; it does not replace or alter
868 the original STMT. */
869 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
872 gimple_mod_subtract (gimple stmt
, int prob1
, int prob2
, int ncounts
,
873 gcov_type count1
, gcov_type count2
, gcov_type all
)
875 gimple stmt1
, stmt2
, stmt3
;
877 gimple bb1end
, bb2end
= NULL
, bb3end
;
878 basic_block bb
, bb2
, bb3
, bb4
;
879 tree optype
, op1
, op2
;
880 edge e12
, e23
= 0, e24
, e34
, e14
;
881 gimple_stmt_iterator gsi
;
884 gcc_assert (is_gimple_assign (stmt
)
885 && gimple_assign_rhs_code (stmt
) == TRUNC_MOD_EXPR
);
887 optype
= TREE_TYPE (gimple_assign_lhs (stmt
));
888 op1
= gimple_assign_rhs1 (stmt
);
889 op2
= gimple_assign_rhs2 (stmt
);
891 bb
= gimple_bb (stmt
);
892 gsi
= gsi_for_stmt (stmt
);
894 result
= make_rename_temp (optype
, "PROF");
895 tmp1
= make_ssa_name (create_tmp_var (optype
, "PROF"), NULL
);
896 stmt1
= gimple_build_assign (result
, op1
);
897 stmt2
= gimple_build_assign (tmp1
, op2
);
898 SSA_NAME_DEF_STMT (tmp1
) = stmt2
;
899 stmt3
= gimple_build_cond (LT_EXPR
, result
, tmp1
, NULL_TREE
, NULL_TREE
);
900 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
901 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
902 gsi_insert_before (&gsi
, stmt3
, GSI_SAME_STMT
);
905 if (ncounts
) /* Assumed to be 0 or 1 */
907 stmt1
= gimple_build_assign_with_ops (MINUS_EXPR
, result
, result
, tmp1
);
908 stmt2
= gimple_build_cond (LT_EXPR
, result
, tmp1
, NULL_TREE
, NULL_TREE
);
909 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
910 gsi_insert_before (&gsi
, stmt2
, GSI_SAME_STMT
);
915 stmt1
= gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt
), result
,
917 gsi_insert_before (&gsi
, stmt1
, GSI_SAME_STMT
);
921 /* Edge e23 connects bb2 to bb3, etc. */
922 /* However block 3 is optional; if it is not there, references
923 to 3 really refer to block 2. */
924 e12
= split_block (bb
, bb1end
);
926 bb2
->count
= all
- count1
;
928 if (ncounts
) /* Assumed to be 0 or 1. */
930 e23
= split_block (bb2
, bb2end
);
932 bb3
->count
= all
- count1
- count2
;
935 e34
= split_block (ncounts
? bb3
: bb2
, bb3end
);
939 e12
->flags
&= ~EDGE_FALLTHRU
;
940 e12
->flags
|= EDGE_FALSE_VALUE
;
941 e12
->probability
= REG_BR_PROB_BASE
- prob1
;
942 e12
->count
= all
- count1
;
944 e14
= make_edge (bb
, bb4
, EDGE_TRUE_VALUE
);
945 e14
->probability
= prob1
;
948 if (ncounts
) /* Assumed to be 0 or 1. */
950 e23
->flags
&= ~EDGE_FALLTHRU
;
951 e23
->flags
|= EDGE_FALSE_VALUE
;
952 e23
->count
= all
- count1
- count2
;
953 e23
->probability
= REG_BR_PROB_BASE
- prob2
;
955 e24
= make_edge (bb2
, bb4
, EDGE_TRUE_VALUE
);
956 e24
->probability
= prob2
;
960 e34
->probability
= REG_BR_PROB_BASE
;
961 e34
->count
= all
- count1
- count2
;
967 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
970 gimple_mod_subtract_transform (gimple_stmt_iterator
*si
)
972 histogram_value histogram
;
974 gcov_type count
, wrong_values
, all
;
975 tree lhs_type
, result
;
976 gcov_type prob1
, prob2
;
977 unsigned int i
, steps
;
978 gcov_type count1
, count2
;
981 stmt
= gsi_stmt (*si
);
982 if (gimple_code (stmt
) != GIMPLE_ASSIGN
)
985 lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
986 if (!INTEGRAL_TYPE_P (lhs_type
))
989 code
= gimple_assign_rhs_code (stmt
);
991 if (code
!= TRUNC_MOD_EXPR
|| !TYPE_UNSIGNED (lhs_type
))
994 histogram
= gimple_histogram_value_of_type (cfun
, stmt
, HIST_TYPE_INTERVAL
);
1000 for (i
= 0; i
< histogram
->hdata
.intvl
.steps
; i
++)
1001 all
+= histogram
->hvalue
.counters
[i
];
1003 wrong_values
+= histogram
->hvalue
.counters
[i
];
1004 wrong_values
+= histogram
->hvalue
.counters
[i
+1];
1005 steps
= histogram
->hdata
.intvl
.steps
;
1006 all
+= wrong_values
;
1007 count1
= histogram
->hvalue
.counters
[0];
1008 count2
= histogram
->hvalue
.counters
[1];
1010 /* Compute probability of taking the optimal path. */
1011 if (check_counter (stmt
, "interval", &count1
, &all
, gimple_bb (stmt
)->count
))
1013 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1017 if (flag_profile_correction
&& count1
+ count2
> all
)
1018 all
= count1
+ count2
;
1020 gcc_assert (count1
+ count2
<= all
);
1022 /* We require that we use just subtractions in at least 50% of all
1025 for (i
= 0; i
< histogram
->hdata
.intvl
.steps
; i
++)
1027 count
+= histogram
->hvalue
.counters
[i
];
1028 if (count
* 2 >= all
)
1032 || optimize_bb_for_size_p (gimple_bb (stmt
)))
1035 gimple_remove_histogram_value (cfun
, stmt
, histogram
);
1038 fprintf (dump_file
, "Mod subtract transformation on insn ");
1039 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1042 /* Compute probability of taking the optimal path(s). */
1045 prob1
= (count1
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1046 prob2
= (count2
* REG_BR_PROB_BASE
+ all
/ 2) / all
;
1053 /* In practice, "steps" is always 2. This interface reflects this,
1054 and will need to be changed if "steps" can change. */
1055 result
= gimple_mod_subtract (stmt
, prob1
, prob2
, i
, count1
, count2
, all
);
1057 gimple_assign_set_rhs_from_tree (si
, result
);
1058 update_stmt (gsi_stmt (*si
));
1063 static VEC(cgraph_node_ptr
, heap
) *cgraph_node_map
= NULL
;
1065 /* Initialize map from FUNCDEF_NO to CGRAPH_NODE. */
1068 init_node_map (void)
1070 struct cgraph_node
*n
;
1072 if (get_last_funcdef_no ())
1073 VEC_safe_grow_cleared (cgraph_node_ptr
, heap
,
1074 cgraph_node_map
, get_last_funcdef_no ());
1076 for (n
= cgraph_nodes
; n
; n
= n
->next
)
1078 if (DECL_STRUCT_FUNCTION (n
->decl
))
1079 VEC_replace (cgraph_node_ptr
, cgraph_node_map
,
1080 DECL_STRUCT_FUNCTION (n
->decl
)->funcdef_no
, n
);
1084 /* Delete the CGRAPH_NODE_MAP. */
1089 VEC_free (cgraph_node_ptr
, heap
, cgraph_node_map
);
1090 cgraph_node_map
= NULL
;
1093 /* Return cgraph node for function with pid */
1095 static inline struct cgraph_node
*
1096 find_func_by_funcdef_no (int func_id
)
1098 int max_id
= get_last_funcdef_no ();
1099 if (func_id
>= max_id
|| VEC_index (cgraph_node_ptr
,
1103 if (flag_profile_correction
)
1104 inform (DECL_SOURCE_LOCATION (current_function_decl
),
1105 "Inconsistent profile: indirect call target (%d) does not exist", func_id
);
1107 error ("Inconsistent profile: indirect call target (%d) does not exist", func_id
);
1112 return VEC_index (cgraph_node_ptr
, cgraph_node_map
, func_id
);
1115 /* Perform sanity check on the indirect call target. Due to race conditions,
1116 false function target may be attributed to an indirect call site. If the
1117 call expression type mismatches with the target function's type, expand_call
1118 may ICE. Here we only do very minimal sanity check just to make compiler happy.
1119 Returns true if TARGET is considered ok for call CALL_STMT. */
1122 check_ic_target (gimple call_stmt
, struct cgraph_node
*target
)
1125 if (gimple_check_call_matching_types (call_stmt
, target
->decl
))
1128 locus
= gimple_location (call_stmt
);
1129 inform (locus
, "Skipping target %s with mismatching types for icall ",
1130 cgraph_node_name (target
));
/* Do transformation

  if (actual_callee_address == address_of_most_common_function/method)
    do direct call
  else
    old call

   ICALL_STMT is the indirect call being speculatively devirtualized,
   DIRECT_CALL the most common callee from the profile, PROB the branch
   probability of the direct path (scaled by REG_BR_PROB_BASE), COUNT the
   execution count of the direct path and ALL the total count of the call.
   Returns the newly created direct-call statement.  */

static gimple
gimple_ic (gimple icall_stmt, struct cgraph_node *direct_call,
	   int prob, gcov_type count, gcov_type all)
{
  gimple dcall_stmt, load_stmt, cond_stmt;
  tree tmp0, tmp1, tmpv, tmp;
  basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
  tree optype = build_pointer_type (void_type_node);
  edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
  gimple_stmt_iterator gsi;
  int lp_nr, dflags;

  cond_bb = gimple_bb (icall_stmt);
  gsi = gsi_for_stmt (icall_stmt);

  /* Load the actual call target into TMP0 and the profiled target's
     address into TMP1, then compare them.  */
  tmpv = create_tmp_reg (optype, "PROF");
  tmp0 = make_ssa_name (tmpv, NULL);
  tmp1 = make_ssa_name (tmpv, NULL);
  tmp = unshare_expr (gimple_call_fn (icall_stmt));
  load_stmt = gimple_build_assign (tmp0, tmp);
  SSA_NAME_DEF_STMT (tmp0) = load_stmt;
  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);

  tmp = fold_convert (optype, build_addr (direct_call->decl,
					  current_function_decl));
  load_stmt = gimple_build_assign (tmp1, tmp);
  SSA_NAME_DEF_STMT (tmp1) = load_stmt;
  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);

  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);

  /* Duplicate the call as a direct call to DIRECT_CALL.  Virtual operands
     are cleared first; update_ssa will recompute them afterwards.  */
  gimple_set_vdef (icall_stmt, NULL_TREE);
  gimple_set_vuse (icall_stmt, NULL_TREE);
  update_stmt (icall_stmt);
  dcall_stmt = gimple_copy (icall_stmt);
  gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
  dflags = flags_from_decl_or_type (direct_call->decl);
  /* A noreturn callee produces no usable result.  */
  if ((dflags & ECF_NORETURN) != 0)
    gimple_call_set_lhs (dcall_stmt, NULL_TREE);
  gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);

  /* Fix CFG.  */
  /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
  e_cd = split_block (cond_bb, cond_stmt);
  dcall_bb = e_cd->dest;
  dcall_bb->count = count;

  e_di = split_block (dcall_bb, dcall_stmt);
  icall_bb = e_di->dest;
  icall_bb->count = all - count;

  /* Do not disturb existing EH edges from the indirect call.  */
  if (!stmt_ends_bb_p (icall_stmt))
    e_ij = split_block (icall_bb, icall_stmt);
  else
    {
      e_ij = find_fallthru_edge (icall_bb->succs);
      /* The indirect call might be noreturn.  */
      if (e_ij != NULL)
	{
	  e_ij->probability = REG_BR_PROB_BASE;
	  e_ij->count = all - count;
	  e_ij = single_pred_edge (split_edge (e_ij));
	}
    }
  if (e_ij != NULL)
    {
      join_bb = e_ij->dest;
      join_bb->count = all;
    }

  e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
  e_cd->probability = prob;
  e_cd->count = count;

  e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
  e_ci->probability = REG_BR_PROB_BASE - prob;
  e_ci->count = all - count;

  remove_edge (e_di);

  if (e_ij != NULL)
    {
      if ((dflags & ECF_NORETURN) != 0)
	e_ij->count = all;
      else
	{
	  e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
	  e_dj->probability = REG_BR_PROB_BASE;
	  e_dj->count = count;

	  e_ij->count = all - count;
	}
      e_ij->probability = REG_BR_PROB_BASE;
    }

  /* Insert PHI node for the call result if necessary.  */
  if (gimple_call_lhs (icall_stmt)
      && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
      && (dflags & ECF_NORETURN) == 0)
    {
      tree result = gimple_call_lhs (icall_stmt);
      gimple phi = create_phi_node (result, join_bb);
      SSA_NAME_DEF_STMT (result) = phi;
      /* Give each arm its own SSA name and merge them in the PHI.  */
      gimple_call_set_lhs (icall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), icall_stmt));
      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
      gimple_call_set_lhs (dcall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), dcall_stmt));
      add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
    }

  /* Build an EH edge for the direct call if necessary.  */
  lp_nr = lookup_stmt_eh_lp (icall_stmt);
  if (lp_nr != 0
      && stmt_could_throw_p (dcall_stmt))
    {
      edge e_eh, e;
      edge_iterator ei;
      gimple_stmt_iterator psi;

      add_stmt_to_eh_lp (dcall_stmt, lp_nr);
      /* Mirror every EH edge of the indirect call on the direct call and
	 copy the corresponding PHI arguments on the EH destinations.  */
      FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
	if (e_eh->flags & EDGE_EH)
	  {
	    e = make_edge (dcall_bb, e_eh->dest, EDGE_EH);
	    for (psi = gsi_start_phis (e_eh->dest);
		 !gsi_end_p (psi); gsi_next (&psi))
	      {
		gimple phi = gsi_stmt (psi);
		SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
			 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
	      }
	  }
    }

  return dcall_stmt;
}
/* Indirect-call profile transformation.
   For every checked indirect/virtual call determine if most common pid of
   function/class method has probability more than 50%.  If yes modify code
   of this call to add a guarded direct call to that target (see gimple_ic).
   Returns true when the statement was transformed.  */

static bool
gimple_ic_transform (gimple stmt)
{
  histogram_value histogram;
  gcov_type val, count, all, bb_all;
  gcov_type prob;
  gimple modify;
  struct cgraph_node *direct_call;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  /* Already a direct call — nothing to promote.  */
  if (gimple_call_fndecl (stmt) != NULL_TREE)
    return false;

  if (gimple_call_internal_p (stmt))
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
  if (!histogram)
    return false;

  /* counters[0] = most common callee id, [1] = its count, [2] = total.  */
  val = histogram->hvalue.counters [0];
  count = histogram->hvalue.counters [1];
  all = histogram->hvalue.counters [2];
  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* Require the dominant target to cover more than 75% of the calls.  */
  if (4 * count <= 3 * all)
    return false;

  bb_all = gimple_bb (stmt)->count;
  /* The order of CHECK_COUNTER calls is important -
     since check_counter can correct the third parameter
     and we want to make count <= all <= bb_all. */
  if ( check_counter (stmt, "ic", &all, &bb_all, bb_all)
      || check_counter (stmt, "ic", &count, &all, all))
    return false;

  /* ALL > 0 here: otherwise the 4*count <= 3*all test above had fired.  */
  prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  direct_call = find_func_by_funcdef_no ((int)val);

  if (direct_call == NULL)
    return false;

  if (!check_ic_target (stmt, direct_call))
    return false;

  modify = gimple_ic (stmt, direct_call, prob, count, all);

  if (dump_file)
    {
      fprintf (dump_file, "Indirect call -> direct call ");
      print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
      fprintf (dump_file, "=> ");
      print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
      fprintf (dump_file, " transformation on insn ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, " to ");
      print_gimple_stmt (dump_file, modify, 0, TDF_SLIM);
      fprintf (dump_file, "hist->count "HOST_WIDEST_INT_PRINT_DEC
	       " hist->all "HOST_WIDEST_INT_PRINT_DEC"\n", count, all);
    }

  return true;
}
1356 /* Return true if the stringop CALL with FNDECL shall be profiled.
1357 SIZE_ARG be set to the argument index for the size of the string
1361 interesting_stringop_to_profile_p (tree fndecl
, gimple call
, int *size_arg
)
1363 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
1365 if (fcode
!= BUILT_IN_MEMCPY
&& fcode
!= BUILT_IN_MEMPCPY
1366 && fcode
!= BUILT_IN_MEMSET
&& fcode
!= BUILT_IN_BZERO
)
1371 case BUILT_IN_MEMCPY
:
1372 case BUILT_IN_MEMPCPY
:
1374 return validate_gimple_arglist (call
, POINTER_TYPE
, POINTER_TYPE
,
1375 INTEGER_TYPE
, VOID_TYPE
);
1376 case BUILT_IN_MEMSET
:
1378 return validate_gimple_arglist (call
, POINTER_TYPE
, INTEGER_TYPE
,
1379 INTEGER_TYPE
, VOID_TYPE
);
1380 case BUILT_IN_BZERO
:
1382 return validate_gimple_arglist (call
, POINTER_TYPE
, INTEGER_TYPE
,
/* Convert stringop (..., vcall_size)
   into
   if (vcall_size == icall_size)
     stringop (..., icall_size);
   else
     stringop (..., vcall_size);
   assuming we'll propagate a true constant into ICALL_SIZE later.

   VCALL_STMT is the original call with a variable size argument,
   ICALL_SIZE the profiled dominant size, PROB the probability of the
   constant-size path, COUNT its execution count, ALL the total count.  */

static void
gimple_stringop_fixed_value (gimple vcall_stmt, tree icall_size, int prob,
			     gcov_type count, gcov_type all)
{
  gimple tmp_stmt, cond_stmt, icall_stmt;
  tree tmp0, tmp1, tmpv, vcall_size, optype;
  basic_block cond_bb, icall_bb, vcall_bb, join_bb;
  edge e_ci, e_cv, e_iv, e_ij, e_vj;
  gimple_stmt_iterator gsi;
  tree fndecl;
  int size_arg;

  /* The caller only hands us calls that passed this predicate.  */
  fndecl = gimple_call_fndecl (vcall_stmt);
  if (!interesting_stringop_to_profile_p (fndecl, vcall_stmt, &size_arg))
    gcc_unreachable ();

  cond_bb = gimple_bb (vcall_stmt);
  gsi = gsi_for_stmt (vcall_stmt);

  vcall_size = gimple_call_arg (vcall_stmt, size_arg);
  optype = TREE_TYPE (vcall_size);

  /* Materialize both sizes into SSA names and compare them.  */
  tmpv = create_tmp_var (optype, "PROF");
  tmp0 = make_ssa_name (tmpv, NULL);
  tmp1 = make_ssa_name (tmpv, NULL);
  tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
  SSA_NAME_DEF_STMT (tmp0) = tmp_stmt;
  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);

  tmp_stmt = gimple_build_assign (tmp1, vcall_size);
  SSA_NAME_DEF_STMT (tmp1) = tmp_stmt;
  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);

  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);

  /* Duplicate the call with the size argument replaced by the constant.
     Virtual operands are cleared; update_ssa recomputes them later.  */
  gimple_set_vdef (vcall_stmt, NULL);
  gimple_set_vuse (vcall_stmt, NULL);
  update_stmt (vcall_stmt);
  icall_stmt = gimple_copy (vcall_stmt);
  gimple_call_set_arg (icall_stmt, size_arg, icall_size);
  gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);

  /* Fix CFG.  */
  /* Edge e_ci connects cond_bb to icall_bb, etc. */
  e_ci = split_block (cond_bb, cond_stmt);
  icall_bb = e_ci->dest;
  icall_bb->count = count;

  e_iv = split_block (icall_bb, icall_stmt);
  vcall_bb = e_iv->dest;
  vcall_bb->count = all - count;

  e_vj = split_block (vcall_bb, vcall_stmt);
  join_bb = e_vj->dest;
  join_bb->count = all;

  e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
  e_ci->probability = prob;
  e_ci->count = count;

  e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
  e_cv->probability = REG_BR_PROB_BASE - prob;
  e_cv->count = all - count;

  remove_edge (e_iv);

  e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
  e_ij->probability = REG_BR_PROB_BASE;
  e_ij->count = count;

  e_vj->probability = REG_BR_PROB_BASE;
  e_vj->count = all - count;

  /* Insert PHI node for the call result if necessary.  */
  if (gimple_call_lhs (vcall_stmt)
      && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
    {
      tree result = gimple_call_lhs (vcall_stmt);
      gimple phi = create_phi_node (result, join_bb);
      SSA_NAME_DEF_STMT (result) = phi;
      /* Each arm writes its own SSA name; the PHI merges them.  */
      gimple_call_set_lhs (vcall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), vcall_stmt));
      add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
      gimple_call_set_lhs (icall_stmt,
			   make_ssa_name (SSA_NAME_VAR (result), icall_stmt));
      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
    }

  /* Because these are all string op builtins, they're all nothrow.  */
  gcc_assert (!stmt_could_throw_p (vcall_stmt));
  gcc_assert (!stmt_could_throw_p (icall_stmt));
}
/* Specialize a string operation (memcpy/mempcpy/memset/bzero) whose size
   argument is one constant value most of the time, by emitting a guarded
   constant-size copy of the call (see gimple_stringop_fixed_value).
   Returns true if the statement at GSI was transformed.
   (The previous header comment mentioning "division/modulo optimization"
   was a stale copy-paste from a sibling function.)  */

static bool
gimple_stringops_transform (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree fndecl;
  tree blck_size;
  enum built_in_function fcode;
  histogram_value histogram;
  gcov_type count, all, val;
  tree dest, src;
  unsigned int dest_align, src_align;
  gcov_type prob;
  tree tree_val;
  int size_arg;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return false;
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (!interesting_stringop_to_profile_p (fndecl, stmt, &size_arg))
    return false;

  /* Nothing to do when the size is already a compile-time constant.  */
  blck_size = gimple_call_arg (stmt, size_arg);
  if (TREE_CODE (blck_size) == INTEGER_CST)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
  if (!histogram)
    return false;
  /* counters[0] = dominant size value, [1] = its count, [2] = total.  */
  val = histogram->hvalue.counters[0];
  count = histogram->hvalue.counters[1];
  all = histogram->hvalue.counters[2];
  gimple_remove_histogram_value (cfun, stmt, histogram);
  /* Require COUNT >= 5/6 of ALL, i.e. the size must be this constant
     at least ~83% of the time; also skip cold/size-optimized blocks.  */
  if ((6 * count / 5) < all || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;
  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;
  /* NOTE(review): ALL is used as a divisor here; the guards above appear
     to ensure ALL > 0 when we reach this point — confirm for the
     COUNT == ALL == 0 corner case.  */
  prob = (count * REG_BR_PROB_BASE + all / 2) / all;
  dest = gimple_call_arg (stmt, 0);
  dest_align = get_pointer_alignment (dest);
  /* Only transform when expansion by pieces at the known alignment is
     actually possible, else the specialization would not pay off.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
      src = gimple_call_arg (stmt, 1);
      src_align = get_pointer_alignment (src);
      if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
	return false;
      break;
    case BUILT_IN_MEMSET:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				gimple_call_arg (stmt, 1),
				dest_align, true))
	return false;
      break;
    case BUILT_IN_BZERO:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				integer_zero_node,
				dest_align, true))
	return false;
      break;
    default:
      gcc_unreachable ();
    }

  tree_val = build_int_cst_wide (get_gcov_type (),
				 (unsigned HOST_WIDE_INT) val,
				 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);

  if (dump_file)
    {
      fprintf (dump_file, "Single value %i stringop transformation on ",
	       (int)val);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);

  return true;
}
/* From the AVERAGE and IOR histograms attached to the string operation
   STMT, derive *EXPECTED_SIZE (rounded mean block size, -1 when unknown)
   and *EXPECTED_ALIGN (common alignment in bits, 0 when unknown).  Both
   histograms are consumed (removed from STMT) in the process.  */

void
stringop_block_profile (gimple stmt, unsigned int *expected_align,
			HOST_WIDE_INT *expected_size)
{
  histogram_value histogram;
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
  if (!histogram)
    *expected_size = -1;
  else if (!histogram->hvalue.counters[1])
    {
      /* No executions recorded — the average is undefined.  */
      *expected_size = -1;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  else
    {
      gcov_type size;
      /* counters[0] = sum of sizes, counters[1] = number of executions;
	 compute the rounded mean.  */
      size = ((histogram->hvalue.counters[0]
	      + histogram->hvalue.counters[1] / 2)
	       / histogram->hvalue.counters[1]);
      /* Even if we can hold bigger value in SIZE, INT_MAX
	 is safe "infinity" for code generation strategies.  */
      if (size > INT_MAX)
	size = INT_MAX;
      *expected_size = size;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
  if (!histogram)
    *expected_align = 0;
  else if (!histogram->hvalue.counters[0])
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      *expected_align = 0;
    }
  else
    {
      gcov_type count;
      int alignment;

      /* counters[0] is the IOR of all observed addresses; its lowest set
	 bit bounds the guaranteed alignment.  */
      count = histogram->hvalue.counters[0];
      alignment = 1;
      /* NOTE(review): the second conjunct "(alignment * 2 * BITS_PER_UNIT)"
	 is nonzero until the multiplication overflows, so it only acts as
	 an overflow-ish stop condition — confirm this matches the intended
	 bound on the alignment search.  */
      while (!(count & alignment)
	     && (alignment * 2 * BITS_PER_UNIT))
	alignment <<= 1;
      *expected_align = alignment * BITS_PER_UNIT;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
}
/* Find values inside STMT for that we want to measure histograms for
   division/modulo optimization, and push them onto VALUES.  */
static void
gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
{
  tree lhs, divisor, op0, type;
  histogram_value hist;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return;

  /* Only integral divisions/modulos are candidates.  */
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (type))
    return;

  switch (gimple_assign_rhs_code (stmt))
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      divisor = gimple_assign_rhs2 (stmt);
      op0 = gimple_assign_rhs1 (stmt);

      /* At most three histograms are pushed below.  */
      VEC_reserve (histogram_value, heap, *values, 3);

      if (is_gimple_reg (divisor))
	/* Check for the case where the divisor is the same value most
	   of the time.  */
	VEC_quick_push (histogram_value, *values,
			gimple_alloc_histogram_value (cfun,
						      HIST_TYPE_SINGLE_VALUE,
						      stmt, divisor));

      /* For mod, check whether it is not often a noop (or replaceable by
	 a few subtractions).  */
      if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type))
	{
	  tree val;
	  /* Check for a special case where the divisor is power of 2.  */
	  VEC_quick_push (histogram_value, *values,
			  gimple_alloc_histogram_value (cfun, HIST_TYPE_POW2,
							stmt, divisor));

	  /* Measure how often op0/divisor falls into the interval [0,1],
	     i.e. how often the mod is a noop or near-noop.  */
	  val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
	  hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
					       stmt, val);
	  hist->hdata.intvl.int_start = 0;
	  hist->hdata.intvl.steps = 2;
	  VEC_quick_push (histogram_value, *values, hist);
	}
      return;

    default:
      return;
    }
}
/* Find calls inside STMT for that we want to measure histograms for
   indirect/virtual call optimization, and push them onto VALUES.  */

static void
gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
{
  tree callee;

  /* Only genuinely indirect calls qualify: not internal calls and not
     calls whose callee decl is already known.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      || gimple_call_internal_p (stmt)
      || gimple_call_fndecl (stmt) != NULL_TREE)
    return;

  callee = gimple_call_fn (stmt);

  VEC_reserve (histogram_value, heap, *values, 3);

  /* Record the distribution of actual call targets.  */
  VEC_quick_push (histogram_value, *values,
		  gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
						stmt, callee));

  return;
}
/* Find values inside STMT for that we want to measure histograms for
   string operations, and push them onto VALUES.  */
static void
gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
{
  tree fndecl;
  tree blck_size;
  tree dest;
  int size_arg;

  if (gimple_code (stmt) != GIMPLE_CALL)
    return;
  fndecl = gimple_call_fndecl (stmt);
  if (!fndecl)
    return;

  if (!interesting_stringop_to_profile_p (fndecl, stmt, &size_arg))
    return;

  dest = gimple_call_arg (stmt, 0);
  blck_size = gimple_call_arg (stmt, size_arg);

  /* Only a non-constant size is worth profiling: measure both its most
     common single value and its average.  */
  if (TREE_CODE (blck_size) != INTEGER_CST)
    {
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
						   stmt, blck_size));
      VEC_safe_push (histogram_value, heap, *values,
		     gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
						   stmt, blck_size));
    }

  /* Also record the IOR of destination addresses to estimate alignment.  */
  if (TREE_CODE (blck_size) != INTEGER_CST)
    VEC_safe_push (histogram_value, heap, *values,
		   gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
						 stmt, dest));
}
1748 /* Find values inside STMT for that we want to measure histograms and adds
1749 them to list VALUES. */
1752 gimple_values_to_profile (gimple stmt
, histogram_values
*values
)
1754 if (flag_value_profile_transformations
)
1756 gimple_divmod_values_to_profile (stmt
, values
);
1757 gimple_stringops_values_to_profile (stmt
, values
);
1758 gimple_indirect_call_to_profile (stmt
, values
);
1763 gimple_find_values_to_profile (histogram_values
*values
)
1766 gimple_stmt_iterator gsi
;
1768 histogram_value hist
= NULL
;
1772 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1773 gimple_values_to_profile (gsi_stmt (gsi
), values
);
1775 FOR_EACH_VEC_ELT (histogram_value
, *values
, i
, hist
)
1779 case HIST_TYPE_INTERVAL
:
1780 hist
->n_counters
= hist
->hdata
.intvl
.steps
+ 2;
1783 case HIST_TYPE_POW2
:
1784 hist
->n_counters
= 2;
1787 case HIST_TYPE_SINGLE_VALUE
:
1788 hist
->n_counters
= 3;
1791 case HIST_TYPE_CONST_DELTA
:
1792 hist
->n_counters
= 4;
1795 case HIST_TYPE_INDIR_CALL
:
1796 hist
->n_counters
= 3;
1799 case HIST_TYPE_AVERAGE
:
1800 hist
->n_counters
= 2;
1804 hist
->n_counters
= 1;
1812 fprintf (dump_file
, "Stmt ");
1813 print_gimple_stmt (dump_file
, hist
->hvalue
.stmt
, 0, TDF_SLIM
);
1814 dump_histogram_value (dump_file
, hist
);