/* gcc/value-prof.c */
1 /* Transformations based on profile information for values.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "expr.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "value-prof.h"
30 #include "output.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "ggc.h"
37 #include "tree-flow.h"
38 #include "tree-flow-inline.h"
39 #include "diagnostic.h"
40 #include "coverage.h"
41 #include "tree.h"
42 #include "gcov-io.h"
44 static struct value_prof_hooks *value_prof_hooks;
46 /* This is the vector of histograms. Created in find_values_to_profile.
47 During profile generation, freed by instrument_values.
48 During profile use, freed by value_profile_transformations. */
50 static histogram_values static_values = NULL;
52 /* This file implements optimizations based on value profiles. Currently the
53 following optimizations are implemented (for more detailed descriptions
54 see comments at value_profile_transformations):
56 1) Division/modulo specialization. Provided that we can determine that the
57 operands of the division have some special properties, we may use them to
58 produce more efficient code.
59 2) Speculative prefetching. If we are able to determine that the difference
60 between addresses accessed by a memory reference is usually a nonzero
61 constant, we may add prefetch instructions.
63 Every such optimization should add its requirements for profiled values to
64 insn_values_to_profile function. This function is called from branch_prob
65 in profile.c and the requested values are instrumented by it in the first
66 compilation with -fprofile-arcs. The optimization may then read the
67 gathered data in the second compilation with -fbranch-probabilities.
69 There are currently two versions, RTL-based and tree-based. Over time
70 the RTL-based version may go away.
72 In the RTL-based version, the measured data is appended as a REG_VALUE_PROFILE
73 note to the instrumented insn. The argument to the note consists of an
74 EXPR_LIST whose members have the following meaning (from the first to
75 the last):
77 -- type of information gathered (HIST_TYPE*)
78 -- the expression that is profiled
79 -- list of counters starting from the first one.
81 In the tree-based version, the measured data is pointed to from the histograms
82 field of the statement annotation of the instrumented statements. It is
83 kept as a linked list of struct histogram_value_t's, which contain the
84 same information as above. */
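/* As an illustration only (this example is not taken from the sources),
   a divisor profiled with HIST_TYPE_SINGLE_VALUE would conceptually
   carry a note argument of the shape

     (EXPR_LIST HIST_TYPE_SINGLE_VALUE
        (EXPR_LIST <profiled divisor rtx>
           (EXPR_LIST <counter 0> (EXPR_LIST <counter 1> ...))))

   i.e. first the histogram type, then the profiled expression, then the
   chain of counters, in the order described above.  */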
86 /* For speculative prefetching, the range in which we do not prefetch (because
87 we assume that the data will be in the cache anyway). The asymmetry between the
88 min and max range reflects the fact that sequential prefetching of the data
89 is commonly done directly by the hardware. Nevertheless, these
90 values are just a guess and should of course be target-specific.
92 FIXME: There is no tree form of speculative prefetching as yet.
94 FIXME: A better approach to instrumentation in the profile-generation
95 pass is to generate calls to magic library functions (to be added to
96 libgcc) rather than inline code. This approach will probably be
97 necessary to get tree-based speculative prefetching working in a useful
98 fashion, as inline code bloats things so much the rest of the compiler has
99 serious problems dealing with it (judging from the rtl behavior). */
101 #ifndef NOPREFETCH_RANGE_MIN
102 #define NOPREFETCH_RANGE_MIN (-16)
103 #endif
104 #ifndef NOPREFETCH_RANGE_MAX
105 #define NOPREFETCH_RANGE_MAX 32
106 #endif
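/* Example (illustration only): with the defaults above, a measured stride
   of +24 bytes lies inside [NOPREFETCH_RANGE_MIN, NOPREFETCH_RANGE_MAX]
   and is assumed to hit the cache, so no prefetch is emitted; a stride of
   +128 (or -64) falls outside the range and is a candidate for the
   speculative prefetch, provided the other conditions checked in
   speculative_prefetching_transform hold.  */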
108 static void rtl_divmod_values_to_profile (rtx, histogram_values *);
109 #ifdef HAVE_prefetch
110 static bool insn_prefetch_values_to_profile (rtx, histogram_values *);
111 static int find_mem_reference_1 (rtx *, void *);
112 static void find_mem_reference_2 (rtx, rtx, void *);
113 static bool find_mem_reference (rtx, rtx *, int *);
114 #endif
116 static void rtl_values_to_profile (rtx, histogram_values *);
117 static rtx rtl_divmod_fixed_value (enum machine_mode, enum rtx_code, rtx, rtx,
118 rtx, gcov_type, int);
119 static rtx rtl_mod_pow2 (enum machine_mode, enum rtx_code, rtx, rtx, rtx, int);
120 static rtx rtl_mod_subtract (enum machine_mode, enum rtx_code, rtx, rtx, rtx,
121 int, int, int);
122 #ifdef HAVE_prefetch
123 static rtx gen_speculative_prefetch (rtx, gcov_type, int);
124 #endif
125 static bool rtl_divmod_fixed_value_transform (rtx);
126 static bool rtl_mod_pow2_value_transform (rtx);
127 static bool rtl_mod_subtract_transform (rtx);
128 #ifdef HAVE_prefetch
129 static bool speculative_prefetching_transform (rtx);
130 #endif
131 static void tree_divmod_values_to_profile (tree, histogram_values *);
132 static void tree_values_to_profile (tree, histogram_values *);
133 static tree tree_divmod_fixed_value (tree, tree, tree, tree,
134 tree, int, gcov_type, gcov_type);
135 static tree tree_mod_pow2 (tree, tree, tree, tree, int, gcov_type, gcov_type);
136 static tree tree_mod_subtract (tree, tree, tree, tree, int, int, int,
137 gcov_type, gcov_type, gcov_type);
138 static bool tree_divmod_fixed_value_transform (tree);
139 static bool tree_mod_pow2_value_transform (tree);
140 static bool tree_mod_subtract_transform (tree);
143 /* Find values inside INSN for which we want to measure histograms for
144 division/modulo optimization and store them in VALUES. */
145 static void
146 rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
148 rtx set, set_src, op1, op2;
149 enum machine_mode mode;
150 histogram_value hist;
152 if (!INSN_P (insn))
153 return;
155 set = single_set (insn);
156 if (!set)
157 return;
159 mode = GET_MODE (SET_DEST (set));
160 if (!INTEGRAL_MODE_P (mode))
161 return;
163 set_src = SET_SRC (set);
164 switch (GET_CODE (set_src))
166 case DIV:
167 case MOD:
168 case UDIV:
169 case UMOD:
170 op1 = XEXP (set_src, 0);
171 op2 = XEXP (set_src, 1);
172 if (side_effects_p (op2))
173 return;
175 /* Check for a special case where the divisor is a power of 2. */
176 if ((GET_CODE (set_src) == UMOD) && !CONSTANT_P (op2))
178 hist = ggc_alloc (sizeof (*hist));
179 hist->hvalue.rtl.value = op2;
180 hist->hvalue.rtl.seq = NULL_RTX;
181 hist->hvalue.rtl.mode = mode;
182 hist->hvalue.rtl.insn = insn;
183 hist->type = HIST_TYPE_POW2;
184 hist->hdata.pow2.may_be_other = 1;
185 VEC_safe_push (histogram_value, *values, hist);
188 /* Check whether the divisor is not in fact a constant. */
189 if (!CONSTANT_P (op2))
191 hist = ggc_alloc (sizeof (*hist));
192 hist->hvalue.rtl.value = op2;
193 hist->hvalue.rtl.mode = mode;
194 hist->hvalue.rtl.seq = NULL_RTX;
195 hist->hvalue.rtl.insn = insn;
196 hist->type = HIST_TYPE_SINGLE_VALUE;
197 VEC_safe_push (histogram_value, *values, hist);
200 /* For mod, check whether it is not often a noop (or replaceable by
201 a few subtractions). */
202 if (GET_CODE (set_src) == UMOD && !side_effects_p (op1))
204 rtx tmp;
206 hist = ggc_alloc (sizeof (*hist));
207 start_sequence ();
208 tmp = simplify_gen_binary (DIV, mode, copy_rtx (op1), copy_rtx (op2));
209 hist->hvalue.rtl.value = force_operand (tmp, NULL_RTX);
210 hist->hvalue.rtl.seq = get_insns ();
211 end_sequence ();
212 hist->hvalue.rtl.mode = mode;
213 hist->hvalue.rtl.insn = insn;
214 hist->type = HIST_TYPE_INTERVAL;
215 hist->hdata.intvl.int_start = 0;
216 hist->hdata.intvl.steps = 2;
217 VEC_safe_push (histogram_value, *values, hist);
219 return;
221 default:
222 return;
226 #ifdef HAVE_prefetch
228 /* Called from find_mem_reference through for_each_rtx, finds a memory
229 reference. I.e. if *EXPR is a MEM, the reference to this MEM is stored
230 to *RET and the traversal of the expression is interrupted by returning 1.
231 Otherwise 0 is returned. */
233 static int
234 find_mem_reference_1 (rtx *expr, void *ret)
236 rtx *mem = ret;
238 if (GET_CODE (*expr) == MEM)
240 *mem = *expr;
241 return 1;
243 return 0;
246 /* Called from find_mem_reference through note_stores to find out whether
247 the memory reference MEM is a store. I.e. if EXPR == MEM, the variable
248 FMR2_WRITE is set to true. */
250 static int fmr2_write;
251 static void
252 find_mem_reference_2 (rtx expr, rtx pat ATTRIBUTE_UNUSED, void *mem)
254 if (expr == mem)
255 fmr2_write = true;
258 /* Find a memory reference inside INSN, return it in MEM. Set WRITE to true
259 if it is a write of the mem. Return false if no memory reference is found,
260 true otherwise. */
262 static bool
263 find_mem_reference (rtx insn, rtx *mem, int *write)
265 *mem = NULL_RTX;
266 for_each_rtx (&PATTERN (insn), find_mem_reference_1, mem);
268 if (!*mem)
269 return false;
271 fmr2_write = false;
272 note_stores (PATTERN (insn), find_mem_reference_2, *mem);
273 *write = fmr2_write;
274 return true;
277 /* Find values inside INSN for which we want to measure histograms for
278 speculative prefetching. Add them to the list VALUES.
279 Returns true if we found any such value, false otherwise. */
281 static bool
282 insn_prefetch_values_to_profile (rtx insn, histogram_values* values)
284 rtx mem, address;
285 int write;
286 histogram_value hist;
288 /* It only makes sense to look for memory references in ordinary insns. */
289 if (GET_CODE (insn) != INSN)
290 return false;
292 if (!find_mem_reference (insn, &mem, &write))
293 return false;
295 address = XEXP (mem, 0);
296 if (side_effects_p (address))
297 return false;
299 if (CONSTANT_P (address))
300 return false;
302 hist = ggc_alloc (sizeof (*hist));
303 hist->hvalue.rtl.value = address;
304 hist->hvalue.rtl.mode = GET_MODE (address);
305 hist->hvalue.rtl.seq = NULL_RTX;
306 hist->hvalue.rtl.insn = insn;
307 hist->type = HIST_TYPE_CONST_DELTA;
308 VEC_safe_push (histogram_value, *values, hist);
310 return true;
312 #endif
313 /* Find values inside INSN for which we want to measure histograms and add
314 them to the list VALUES. */
315 static void
316 rtl_values_to_profile (rtx insn, histogram_values *values)
318 if (flag_value_profile_transformations)
319 rtl_divmod_values_to_profile (insn, values);
321 #ifdef HAVE_prefetch
322 if (flag_speculative_prefetching)
323 insn_prefetch_values_to_profile (insn, values);
324 #endif
327 /* Find the list of values for which we want to measure histograms. */
328 static void
329 rtl_find_values_to_profile (histogram_values *values)
331 rtx insn;
332 unsigned i, libcall_level;
334 life_analysis (NULL, PROP_DEATH_NOTES);
336 *values = VEC_alloc (histogram_value, 0);
337 libcall_level = 0;
338 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
339 rtl_values_to_profile (insn, values);
340 static_values = *values;
342 for (i = 0; i < VEC_length (histogram_value, *values); i++)
344 histogram_value hist = VEC_index (histogram_value, *values, i);
346 switch (hist->type)
348 case HIST_TYPE_INTERVAL:
349 if (dump_file)
350 fprintf (dump_file,
351 "Interval counter for insn %d, range %d -- %d.\n",
352 INSN_UID ((rtx)hist->hvalue.rtl.insn),
353 hist->hdata.intvl.int_start,
354 (hist->hdata.intvl.int_start
355 + hist->hdata.intvl.steps - 1));
356 hist->n_counters = hist->hdata.intvl.steps + 2;
357 break;
359 case HIST_TYPE_POW2:
360 if (dump_file)
361 fprintf (dump_file,
362 "Pow2 counter for insn %d.\n",
363 INSN_UID ((rtx)hist->hvalue.rtl.insn));
364 hist->n_counters
365 = GET_MODE_BITSIZE (hist->hvalue.rtl.mode)
366 + (hist->hdata.pow2.may_be_other ? 1 : 0);
367 break;
369 case HIST_TYPE_SINGLE_VALUE:
370 if (dump_file)
371 fprintf (dump_file,
372 "Single value counter for insn %d.\n",
373 INSN_UID ((rtx)hist->hvalue.rtl.insn));
374 hist->n_counters = 3;
375 break;
377 case HIST_TYPE_CONST_DELTA:
378 if (dump_file)
379 fprintf (dump_file,
380 "Constant delta counter for insn %d.\n",
381 INSN_UID ((rtx)hist->hvalue.rtl.insn));
382 hist->n_counters = 4;
383 break;
385 default:
386 gcc_unreachable ();
389 allocate_reg_info (max_reg_num (), FALSE, FALSE);
392 /* Main entry point. Finds REG_VALUE_PROFILE notes from the profiler and uses
393 them to identify and exploit properties of values that are hard to analyze
394 statically.
396 We do the following transformations:
400 x = a / b;
402 where b is almost always a constant N is transformed to
404 if (b == N)
405 x = a / N;
406 else
407 x = a / b;
409 Analogously for %
413 x = a % b
415 where b is almost always a power of 2 and the division is unsigned
416 TODO -- handle signed case as well
418 if ((b & (b - 1)) == 0)
419 x = a & (b - 1);
420 else
421 x = a % b;
423 Note that when b = 0, no error will occur and x = a; this is correct,
424 as the result of such an operation is undefined.
428 x = a % b
430 where a is almost always less than b and the division is unsigned
431 TODO -- handle signed case as well
433 x = a;
434 if (x >= b)
435 x %= b;
439 x = a % b
441 where a is almost always less than 2 * b and the division is unsigned
442 TODO -- handle signed case as well
444 x = a;
445 if (x >= b)
446 x -= b;
447 if (x >= b)
448 x %= b;
450 It would be possible to continue analogically for K * b for other small
451 K's, but it is probably not useful.
455 Read or write of mem[address], where the value of address usually changes
456 by a constant C != 0 between consecutive accesses; with
457 -fspeculative-prefetching we then add a prefetch of address + C before
458 the insn. This handles prefetching of several interesting cases in addition
459 to simple prefetching for addresses that are induction variables, e.g.
460 linked lists allocated sequentially (even if they are processed
461 recursively).
463 TODO -- we should also check that the difference from adjacent
464 memory references is not (usually) small, so that we do
465 not issue overlapping prefetches. Also we should employ some
466 heuristics to eliminate cases where prefetching evidently spoils
467 the code.
468 -- it should somehow cooperate with the loop optimizer prefetching
470 TODO:
472 There are other useful cases that could be handled by a similar mechanism,
473 for example:
475 for (i = 0; i < n; i++)
478 transform to (for constant N):
480 if (n == N)
481 for (i = 0; i < N; i++)
483 else
484 for (i = 0; i < n; i++)
486 making the unroller happy. Since this may grow the code significantly,
487 we would have to be very careful here. */
489 static bool
490 rtl_value_profile_transformations (void)
492 rtx insn, next;
493 int changed = false;
495 for (insn = get_insns (); insn; insn = next)
497 next = NEXT_INSN (insn);
499 if (!INSN_P (insn))
500 continue;
502 /* Scan for an insn carrying a histogram. */
503 if (!find_reg_note (insn, REG_VALUE_PROFILE, 0))
504 continue;
506 /* Ignore cold areas -- we are growing the code. */
507 if (!maybe_hot_bb_p (BLOCK_FOR_INSN (insn)))
508 continue;
510 if (dump_file)
512 fprintf (dump_file, "Trying transformations on insn %d\n",
513 INSN_UID (insn));
514 print_rtl_single (dump_file, insn);
517 /* Transformations: */
518 if (flag_value_profile_transformations
519 && (rtl_mod_subtract_transform (insn)
520 || rtl_divmod_fixed_value_transform (insn)
521 || rtl_mod_pow2_value_transform (insn)))
522 changed = true;
523 #ifdef HAVE_prefetch
524 if (flag_speculative_prefetching
525 && speculative_prefetching_transform (insn))
526 changed = true;
527 #endif
530 if (changed)
532 commit_edge_insertions ();
533 allocate_reg_info (max_reg_num (), FALSE, FALSE);
536 return changed;
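/* Illustration only -- the two functions below are not part of GCC and
   are never called; they merely show, at the C source level, the shape
   of the code that transformations 1) and 2) documented above produce.
   The frequent divisor 8 and the parameter names are made up for the
   example.  */

static unsigned int ATTRIBUTE_UNUSED
value_prof_example_div (unsigned int a, unsigned int b)
{
  /* Transformation 1): a / b, where profiling showed that b is almost
     always 8.  */
  if (b == 8)
    return a / 8;		/* Specialized path: becomes a shift.  */
  else
    return a / b;		/* General (rare) path.  */
}

static unsigned int ATTRIBUTE_UNUSED
value_prof_example_mod (unsigned int a, unsigned int b)
{
  /* Transformation 2): a % b, where b is almost always a power of 2
     and the operation is unsigned.  */
  if ((b & (b - 1)) == 0)
    return a & (b - 1);		/* Mask instead of modulo.  */
  else
    return a % b;
}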
539 /* Generate code for transformation 1 (with MODE and OPERATION, operands OP1
540 and OP2, whose value is expected to be VALUE, result TARGET and
541 probability of taking the optimal path PROB). */
542 static rtx
543 rtl_divmod_fixed_value (enum machine_mode mode, enum rtx_code operation,
544 rtx target, rtx op1, rtx op2, gcov_type value,
545 int prob)
547 rtx tmp, tmp1, jump;
548 rtx neq_label = gen_label_rtx ();
549 rtx end_label = gen_label_rtx ();
550 rtx sequence;
552 start_sequence ();
554 if (!REG_P (op2))
556 tmp = gen_reg_rtx (mode);
557 emit_move_insn (tmp, copy_rtx (op2));
559 else
560 tmp = op2;
562 do_compare_rtx_and_jump (tmp, GEN_INT (value), NE, 0, mode, NULL_RTX,
563 NULL_RTX, neq_label);
565 /* Add a branch probability to the jump we just created. */
566 jump = get_last_insn ();
567 REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
568 GEN_INT (REG_BR_PROB_BASE - prob),
569 REG_NOTES (jump));
571 tmp1 = simplify_gen_binary (operation, mode,
572 copy_rtx (op1), GEN_INT (value));
573 tmp1 = force_operand (tmp1, target);
574 if (tmp1 != target)
575 emit_move_insn (copy_rtx (target), copy_rtx (tmp1));
577 emit_jump_insn (gen_jump (end_label));
578 emit_barrier ();
580 emit_label (neq_label);
581 tmp1 = simplify_gen_binary (operation, mode,
582 copy_rtx (op1), copy_rtx (tmp));
583 tmp1 = force_operand (tmp1, target);
584 if (tmp1 != target)
585 emit_move_insn (copy_rtx (target), copy_rtx (tmp1));
587 emit_label (end_label);
589 sequence = get_insns ();
590 end_sequence ();
591 rebuild_jump_labels (sequence);
592 return sequence;
595 /* Do transform 1) on INSN if applicable. */
596 static bool
597 rtl_divmod_fixed_value_transform (rtx insn)
599 rtx set, set_src, set_dest, op1, op2, value, histogram;
600 enum rtx_code code;
601 enum machine_mode mode;
602 gcov_type val, count, all;
603 edge e;
604 int prob;
606 set = single_set (insn);
607 if (!set)
608 return false;
610 set_src = SET_SRC (set);
611 set_dest = SET_DEST (set);
612 code = GET_CODE (set_src);
613 mode = GET_MODE (set_dest);
615 if (code != DIV && code != MOD && code != UDIV && code != UMOD)
616 return false;
617 op1 = XEXP (set_src, 0);
618 op2 = XEXP (set_src, 1);
620 for (histogram = REG_NOTES (insn);
621 histogram;
622 histogram = XEXP (histogram, 1))
623 if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
624 && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_SINGLE_VALUE))
625 break;
627 if (!histogram)
628 return false;
630 histogram = XEXP (XEXP (histogram, 0), 1);
631 value = XEXP (histogram, 0);
632 histogram = XEXP (histogram, 1);
633 val = INTVAL (XEXP (histogram, 0));
634 histogram = XEXP (histogram, 1);
635 count = INTVAL (XEXP (histogram, 0));
636 histogram = XEXP (histogram, 1);
637 all = INTVAL (XEXP (histogram, 0));
639 /* We require that count be at least half of all; this means
640 that for the transformation to fire the value must be constant
641 at least 50% of the time (and 75% gives the guarantee of usage). */
642 if (!rtx_equal_p (op2, value) || 2 * count < all)
643 return false;
645 if (dump_file)
646 fprintf (dump_file, "Div/mod by constant transformation on insn %d\n",
647 INSN_UID (insn));
649 /* Compute probability of taking the optimal path. */
650 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
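  /* For example (illustration only), with count == 900 and all == 1000
     the rounded result is (900 * REG_BR_PROB_BASE + 500) / 1000, i.e.
     90% of REG_BR_PROB_BASE.  */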
652 e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
653 delete_insn (insn);
655 insert_insn_on_edge (
656 rtl_divmod_fixed_value (mode, code, set_dest,
657 op1, op2, val, prob), e);
659 return true;
662 /* Generate code for transformation 2 (with MODE and OPERATION, operands OP1
663 and OP2, result TARGET and probability of taking the optimal path PROB). */
664 static rtx
665 rtl_mod_pow2 (enum machine_mode mode, enum rtx_code operation, rtx target,
666 rtx op1, rtx op2, int prob)
668 rtx tmp, tmp1, tmp2, tmp3, jump;
669 rtx neq_label = gen_label_rtx ();
670 rtx end_label = gen_label_rtx ();
671 rtx sequence;
673 start_sequence ();
675 if (!REG_P (op2))
677 tmp = gen_reg_rtx (mode);
678 emit_move_insn (tmp, copy_rtx (op2));
680 else
681 tmp = op2;
683 tmp1 = expand_simple_binop (mode, PLUS, tmp, constm1_rtx, NULL_RTX,
684 0, OPTAB_WIDEN);
685 tmp2 = expand_simple_binop (mode, AND, tmp, tmp1, NULL_RTX,
686 0, OPTAB_WIDEN);
687 do_compare_rtx_and_jump (tmp2, const0_rtx, NE, 0, mode, NULL_RTX,
688 NULL_RTX, neq_label);
690 /* Add a branch probability to the jump we just created. */
691 jump = get_last_insn ();
692 REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
693 GEN_INT (REG_BR_PROB_BASE - prob),
694 REG_NOTES (jump));
696 tmp3 = expand_simple_binop (mode, AND, op1, tmp1, target,
697 0, OPTAB_WIDEN);
698 if (tmp3 != target)
699 emit_move_insn (copy_rtx (target), tmp3);
700 emit_jump_insn (gen_jump (end_label));
701 emit_barrier ();
703 emit_label (neq_label);
704 tmp1 = simplify_gen_binary (operation, mode, copy_rtx (op1), copy_rtx (tmp));
705 tmp1 = force_operand (tmp1, target);
706 if (tmp1 != target)
707 emit_move_insn (target, tmp1);
709 emit_label (end_label);
711 sequence = get_insns ();
712 end_sequence ();
713 rebuild_jump_labels (sequence);
714 return sequence;
717 /* Do transform 2) on INSN if applicable. */
718 static bool
719 rtl_mod_pow2_value_transform (rtx insn)
721 rtx set, set_src, set_dest, op1, op2, value, histogram;
722 enum rtx_code code;
723 enum machine_mode mode;
724 gcov_type wrong_values, count;
725 edge e;
726 int i, all, prob;
728 set = single_set (insn);
729 if (!set)
730 return false;
732 set_src = SET_SRC (set);
733 set_dest = SET_DEST (set);
734 code = GET_CODE (set_src);
735 mode = GET_MODE (set_dest);
737 if (code != UMOD)
738 return false;
739 op1 = XEXP (set_src, 0);
740 op2 = XEXP (set_src, 1);
742 for (histogram = REG_NOTES (insn);
743 histogram;
744 histogram = XEXP (histogram, 1))
745 if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
746 && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_POW2))
747 break;
749 if (!histogram)
750 return false;
752 histogram = XEXP (XEXP (histogram, 0), 1);
753 value = XEXP (histogram, 0);
754 histogram = XEXP (histogram, 1);
755 wrong_values = INTVAL (XEXP (histogram, 0));
756 histogram = XEXP (histogram, 1);
758 count = 0;
759 for (i = 0; i < GET_MODE_BITSIZE (mode); i++)
761 count += INTVAL (XEXP (histogram, 0));
762 histogram = XEXP (histogram, 1);
765 if (!rtx_equal_p (op2, value))
766 return false;
768 /* We require that we hit a power of two in at least half of all evaluations. */
769 if (count < wrong_values)
770 return false;
772 if (dump_file)
773 fprintf (dump_file, "Mod power of 2 transformation on insn %d\n",
774 INSN_UID (insn));
776 /* Compute probability of taking the optimal path. */
777 all = count + wrong_values;
778 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
780 e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
781 delete_insn (insn);
783 insert_insn_on_edge (
784 rtl_mod_pow2 (mode, code, set_dest, op1, op2, prob), e);
786 return true;
789 /* Generate code for transformations 3 and 4 (with MODE and OPERATION,
790 operands OP1 and OP2, result TARGET, at most SUB subtractions, and
791 probability of taking the optimal path(s) PROB1 and PROB2). */
792 static rtx
793 rtl_mod_subtract (enum machine_mode mode, enum rtx_code operation,
794 rtx target, rtx op1, rtx op2, int sub, int prob1, int prob2)
796 rtx tmp, tmp1, jump;
797 rtx end_label = gen_label_rtx ();
798 rtx sequence;
799 int i;
801 start_sequence ();
803 if (!REG_P (op2))
805 tmp = gen_reg_rtx (mode);
806 emit_move_insn (tmp, copy_rtx (op2));
808 else
809 tmp = op2;
811 emit_move_insn (target, copy_rtx (op1));
812 do_compare_rtx_and_jump (target, tmp, LTU, 0, mode, NULL_RTX,
813 NULL_RTX, end_label);
815 /* Add a branch probability to the jump we just created. */
816 jump = get_last_insn ();
817 REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
818 GEN_INT (prob1), REG_NOTES (jump));
820 for (i = 0; i < sub; i++)
822 tmp1 = expand_simple_binop (mode, MINUS, target, tmp, target,
823 0, OPTAB_WIDEN);
824 if (tmp1 != target)
825 emit_move_insn (target, tmp1);
826 do_compare_rtx_and_jump (target, tmp, LTU, 0, mode, NULL_RTX,
827 NULL_RTX, end_label);
829 /* Add a branch probability to the jump we just created. */
830 jump = get_last_insn ();
831 REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
832 GEN_INT (prob2), REG_NOTES (jump));
835 tmp1 = simplify_gen_binary (operation, mode, copy_rtx (target), copy_rtx (tmp));
836 tmp1 = force_operand (tmp1, target);
837 if (tmp1 != target)
838 emit_move_insn (target, tmp1);
840 emit_label (end_label);
842 sequence = get_insns ();
843 end_sequence ();
844 rebuild_jump_labels (sequence);
845 return sequence;
848 /* Do transforms 3) and 4) on INSN if applicable. */
849 static bool
850 rtl_mod_subtract_transform (rtx insn)
852 rtx set, set_src, set_dest, op1, op2, histogram;
853 enum rtx_code code;
854 enum machine_mode mode;
855 gcov_type wrong_values, counts[2], count, all;
856 edge e;
857 int i, prob1, prob2;
859 set = single_set (insn);
860 if (!set)
861 return false;
863 set_src = SET_SRC (set);
864 set_dest = SET_DEST (set);
865 code = GET_CODE (set_src);
866 mode = GET_MODE (set_dest);
868 if (code != UMOD)
869 return false;
870 op1 = XEXP (set_src, 0);
871 op2 = XEXP (set_src, 1);
873 for (histogram = REG_NOTES (insn);
874 histogram;
875 histogram = XEXP (histogram, 1))
876 if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
877 && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_INTERVAL))
878 break;
880 if (!histogram)
881 return false;
883 histogram = XEXP (XEXP (histogram, 0), 1);
884 histogram = XEXP (histogram, 1);
886 all = 0;
887 for (i = 0; i < 2; i++)
889 counts[i] = INTVAL (XEXP (histogram, 0));
890 all += counts[i];
891 histogram = XEXP (histogram, 1);
893 wrong_values = INTVAL (XEXP (histogram, 0));
894 histogram = XEXP (histogram, 1);
895 wrong_values += INTVAL (XEXP (histogram, 0));
896 all += wrong_values;
898 /* We require that we use just subtractions in at least 50% of all
899 evaluations. */
900 count = 0;
901 for (i = 0; i < 2; i++)
903 count += counts[i];
904 if (count * 2 >= all)
905 break;
908 if (i == 2)
909 return false;
911 if (dump_file)
912 fprintf (dump_file, "Mod subtract transformation on insn %d\n",
913 INSN_UID (insn));
915 /* Compute probability of taking the optimal path(s). */
916 prob1 = (counts[0] * REG_BR_PROB_BASE + all / 2) / all;
917 prob2 = (counts[1] * REG_BR_PROB_BASE + all / 2) / all;
919 e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
920 delete_insn (insn);
922 insert_insn_on_edge (
923 rtl_mod_subtract (mode, code, set_dest,
924 op1, op2, i, prob1, prob2), e);
926 return true;
929 #ifdef HAVE_prefetch
930 /* Generate code for transformation 5 for mem with ADDRESS and a constant
931 step DELTA. WRITE is true if the reference is a store to mem. */
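/* Roughly the source-level equivalent (illustration only, not how the
   code below is written) is emitting
     __builtin_prefetch ((char *) ADDRESS + DELTA, WRITE, 3)
   just before the original memory access; the constant 3 matches the
   locality operand passed to gen_prefetch below.  */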
933 static rtx
934 gen_speculative_prefetch (rtx address, gcov_type delta, int write)
936 rtx tmp;
937 rtx sequence;
939 /* TODO: we do the prefetching for just one iteration ahead, which
940 is often not enough. */
941 start_sequence ();
942 if (offsettable_address_p (0, VOIDmode, address))
943 tmp = plus_constant (copy_rtx (address), delta);
944 else
946 tmp = simplify_gen_binary (PLUS, Pmode,
947 copy_rtx (address), GEN_INT (delta));
948 tmp = force_operand (tmp, NULL);
950 if (! (*insn_data[(int)CODE_FOR_prefetch].operand[0].predicate)
951 (tmp, insn_data[(int)CODE_FOR_prefetch].operand[0].mode))
952 tmp = force_reg (Pmode, tmp);
953 emit_insn (gen_prefetch (tmp, GEN_INT (write), GEN_INT (3)));
954 sequence = get_insns ();
955 end_sequence ();
957 return sequence;
960 /* Do transform 5) on INSN if applicable. */
962 static bool
963 speculative_prefetching_transform (rtx insn)
965 rtx histogram, value;
966 gcov_type val, count, all;
967 edge e;
968 rtx mem, address;
969 int write;
971 if (!maybe_hot_bb_p (BLOCK_FOR_INSN (insn)))
972 return false;
974 if (!find_mem_reference (insn, &mem, &write))
975 return false;
977 address = XEXP (mem, 0);
978 if (side_effects_p (address))
979 return false;
981 if (CONSTANT_P (address))
982 return false;
984 for (histogram = REG_NOTES (insn);
985 histogram;
986 histogram = XEXP (histogram, 1))
987 if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
988 && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_CONST_DELTA))
989 break;
991 if (!histogram)
992 return false;
994 histogram = XEXP (XEXP (histogram, 0), 1);
995 value = XEXP (histogram, 0);
996 histogram = XEXP (histogram, 1);
997 /* Skip last value referenced. */
998 histogram = XEXP (histogram, 1);
999 val = INTVAL (XEXP (histogram, 0));
1000 histogram = XEXP (histogram, 1);
1001 count = INTVAL (XEXP (histogram, 0));
1002 histogram = XEXP (histogram, 1);
1003 all = INTVAL (XEXP (histogram, 0));
1005 /* With so few executions we do not really have a reason to optimize the
1006 statement, and more importantly, the data about address differences
1007 are spoiled by the first item, which had no previous value to compare
1008 with. */
1009 if (all < 4)
1010 return false;
1012 /* We require that count be at least half of all; this means
1013 that for the transformation to fire the value must be constant
1014 at least 50% of the time (and 75% gives the guarantee of usage). */
1015 if (!rtx_equal_p (address, value) || 2 * count < all)
1016 return false;
1018 /* If the difference is too small, it does not make much sense to
1019 prefetch, as the memory is probably already in the cache. */
1020 if (val >= NOPREFETCH_RANGE_MIN && val <= NOPREFETCH_RANGE_MAX)
1021 return false;
1023 if (dump_file)
1024 fprintf (dump_file, "Speculative prefetching for insn %d\n",
1025 INSN_UID (insn));
1027 e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
1029 insert_insn_on_edge (gen_speculative_prefetch (address, val, write), e);
1031 return true;
1033 #endif /* HAVE_prefetch */
1035 /* Tree based transformations. */
1036 static bool
1037 tree_value_profile_transformations (void)
1039 basic_block bb;
1040 block_stmt_iterator bsi;
1041 bool changed = false;
1043 FOR_EACH_BB (bb)
1045 /* Ignore cold areas -- we are enlarging the code. */
1046 if (!maybe_hot_bb_p (bb))
1047 continue;
1049 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1051 tree stmt = bsi_stmt (bsi);
1052 stmt_ann_t ann = get_stmt_ann (stmt);
1053 histogram_value th = ann->histograms;
1054 if (!th)
1055 continue;
1057 if (dump_file)
1059 fprintf (dump_file, "Trying transformations on insn ");
1060 print_generic_stmt (dump_file, stmt, TDF_SLIM);
1063 /* Transformations: */
1064 /* The order of things in this conditional controls which
1065 transformation is used when more than one is applicable. */
1066 /* It is expected that any code added by the transformations
1067 will be added before the current statement, and that the
1068 current statement remain valid (although possibly
1069 modified) upon return. */
1070 if (flag_value_profile_transformations
1071 && (tree_mod_subtract_transform (stmt)
1072 || tree_divmod_fixed_value_transform (stmt)
1073 || tree_mod_pow2_value_transform (stmt)))
1075 changed = true;
1076 /* Original statement may no longer be in the same block. */
1077 bb = bb_for_stmt (stmt);
1080 /* Free extra storage from compute_value_histograms. */
1081 while (th)
1083 free (th->hvalue.tree.counters);
1084 th = th->hvalue.tree.next;
1086 ann->histograms = 0;
1090 if (changed)
1092 counts_to_freqs ();
1095 return changed;
1098 /* Generate code for transformation 1 (with OPERATION, operands OP1
1099 and OP2, whose value is expected to be VALUE, parent modify-expr STMT and
1100 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
1101 within roundoff error). This generates the result into a temp and returns
1102 the temp; it does not replace or alter the original STMT. */
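/* Conceptually (illustration only), for a division op1 / op2 whose
   divisor is usually VALUE, the statements and CFG built below have
   the shape

     bb:   tmpv = VALUE; tmp1 = op2;
           if (tmp1 != tmpv) goto bb3; else goto bb2;
     bb2:  tmp2 = op1 / tmpv;   goto bb4;
     bb3:  tmp2 = op1 / op2;    (falls through to bb4)
     bb4:  the original statement, which the caller rewrites to use tmp2

   with edge counts and probabilities distributed according to COUNT
   and ALL.  */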
1103 static tree
1104 tree_divmod_fixed_value (tree stmt, tree operation,
1105 tree op1, tree op2, tree value, int prob, gcov_type count,
1106 gcov_type all)
1108 tree stmt1, stmt2, stmt3;
1109 tree tmp1, tmp2, tmpv;
1110 tree label_decl1 = create_artificial_label ();
1111 tree label_decl2 = create_artificial_label ();
1112 tree label_decl3 = create_artificial_label ();
1113 tree label1, label2, label3;
1114 tree bb1end, bb2end, bb3end;
1115 basic_block bb, bb2, bb3, bb4;
1116 tree optype = TREE_TYPE (operation);
1117 edge e12, e13, e23, e24, e34;
1118 block_stmt_iterator bsi;
1120 bb = bb_for_stmt (stmt);
1121 bsi = bsi_for_stmt (stmt);
1123 tmpv = create_tmp_var (optype, "PROF");
1124 tmp1 = create_tmp_var (optype, "PROF");
1125 stmt1 = build2 (MODIFY_EXPR, optype, tmpv, fold_convert (optype, value));
1126 stmt2 = build2 (MODIFY_EXPR, optype, tmp1, op2);
1127 stmt3 = build3 (COND_EXPR, void_type_node,
1128 build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
1129 build1 (GOTO_EXPR, void_type_node, label_decl2),
1130 build1 (GOTO_EXPR, void_type_node, label_decl1));
1131 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1132 bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
1133 bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
1134 bb1end = stmt3;
1136 tmp2 = create_tmp_var (optype, "PROF");
1137 label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
1138 stmt1 = build2 (MODIFY_EXPR, optype, tmp2,
1139 build2 (TREE_CODE (operation), optype, op1, tmpv));
1140 bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
1141 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1142 bb2end = stmt1;
1144 label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
1145 stmt1 = build2 (MODIFY_EXPR, optype, tmp2,
1146 build2 (TREE_CODE (operation), optype, op1, op2));
1147 bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
1148 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1149 bb3end = stmt1;
1151 label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
1152 bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
1154 /* Fix CFG. */
1155 /* Edge e23 connects bb2 to bb3, etc. */
1156 e12 = split_block (bb, bb1end);
1157 bb2 = e12->dest;
1158 bb2->count = count;
1159 e23 = split_block (bb2, bb2end);
1160 bb3 = e23->dest;
1161 bb3->count = all - count;
1162 e34 = split_block (bb3, bb3end);
1163 bb4 = e34->dest;
1164 bb4->count = all;
1166 e12->flags &= ~EDGE_FALLTHRU;
1167 e12->flags |= EDGE_FALSE_VALUE;
1168 e12->probability = prob;
1169 e12->count = count;
1171 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
1172 e13->probability = REG_BR_PROB_BASE - prob;
1173 e13->count = all - count;
1175 remove_edge (e23);
1177 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
1178 e24->probability = REG_BR_PROB_BASE;
1179 e24->count = count;
1181 e34->probability = REG_BR_PROB_BASE;
1182 e34->count = all - count;
1184 return tmp2;
1187 /* Do transform 1) on STMT if applicable. */
1188 static bool
1189 tree_divmod_fixed_value_transform (tree stmt)
1191 stmt_ann_t ann = get_stmt_ann (stmt);
1192 histogram_value histogram;
1193 enum tree_code code;
1194 gcov_type val, count, all;
1195 tree modify, op, op1, op2, result, value, tree_val;
1196 int prob;
1198 modify = stmt;
1199 if (TREE_CODE (stmt) == RETURN_EXPR
1200 && TREE_OPERAND (stmt, 0)
1201 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
1202 modify = TREE_OPERAND (stmt, 0);
1203 if (TREE_CODE (modify) != MODIFY_EXPR)
1204 return false;
1205 op = TREE_OPERAND (modify, 1);
1206 if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
1207 return false;
1208 code = TREE_CODE (op);
1210 if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
1211 return false;
1213 op1 = TREE_OPERAND (op, 0);
1214 op2 = TREE_OPERAND (op, 1);
1215 if (!ann->histograms)
1216 return false;
1218 for (histogram = ann->histograms; histogram; histogram = histogram->hvalue.tree.next)
1219 if (histogram->type == HIST_TYPE_SINGLE_VALUE)
1220 break;
1222 if (!histogram)
1223 return false;
1225 value = histogram->hvalue.tree.value;
1226 val = histogram->hvalue.tree.counters[0];
1227 count = histogram->hvalue.tree.counters[1];
1228 all = histogram->hvalue.tree.counters[2];
1230 /* We require that count is at least half of all; this means
1231 that for the transformation to fire the value must be constant
1232 at least 50% of the time (and 75% gives the guarantee of usage). */
1233 if (simple_cst_equal (op2, value) != 1 || 2 * count < all)
1234 return false;
1236 if (dump_file)
1238 fprintf (dump_file, "Div/mod by constant transformation on insn ");
1239 print_generic_stmt (dump_file, stmt, TDF_SLIM);
1242 /* Compute probability of taking the optimal path. */
1243 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
1245 tree_val = build_int_cst_wide (get_gcov_type (),
1246 (unsigned HOST_WIDE_INT) val,
1247 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
1248 result = tree_divmod_fixed_value (stmt, op, op1, op2, tree_val, prob, count, all);
1250 TREE_OPERAND (modify, 1) = result;
1252 return true;
1255 /* Generate code for transformation 2 (with OPERATION, operands OP1
1256 and OP2, parent modify-expr STMT and probability of taking the optimal
1257 path PROB, which is equivalent to COUNT/ALL within roundoff error).
1258 This generates the result into a temp and returns
1259 the temp; it does not replace or alter the original STMT. */
1260 static tree
1261 tree_mod_pow2 (tree stmt, tree operation, tree op1, tree op2, int prob,
1262 gcov_type count, gcov_type all)
1264 tree stmt1, stmt2, stmt3, stmt4;
1265 tree tmp1, tmp2, tmp3;
1266 tree label_decl1 = create_artificial_label ();
1267 tree label_decl2 = create_artificial_label ();
1268 tree label_decl3 = create_artificial_label ();
1269 tree label1, label2, label3;
1270 tree bb1end, bb2end, bb3end;
1271 basic_block bb, bb2, bb3, bb4;
1272 tree optype = TREE_TYPE (operation);
1273 edge e12, e13, e23, e24, e34;
1274 block_stmt_iterator bsi;
1275 tree result = create_tmp_var (optype, "PROF");
1277 bb = bb_for_stmt (stmt);
1278 bsi = bsi_for_stmt (stmt);
1280 tmp1 = create_tmp_var (optype, "PROF");
1281 tmp2 = create_tmp_var (optype, "PROF");
1282 tmp3 = create_tmp_var (optype, "PROF");
1283 stmt1 = build2 (MODIFY_EXPR, optype, tmp1, fold_convert (optype, op2));
1284 stmt2 = build2 (MODIFY_EXPR, optype, tmp2,
1285 build2 (PLUS_EXPR, optype, op2, integer_minus_one_node));
1286 stmt3 = build2 (MODIFY_EXPR, optype, tmp3,
1287 build2 (BIT_AND_EXPR, optype, tmp2, tmp1));
1288 stmt4 = build3 (COND_EXPR, void_type_node,
1289 build2 (NE_EXPR, boolean_type_node, tmp3, integer_zero_node),
1290 build1 (GOTO_EXPR, void_type_node, label_decl2),
1291 build1 (GOTO_EXPR, void_type_node, label_decl1));
1292 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1293 bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
1294 bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
1295 bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
1296 bb1end = stmt4;
1298 /* tmp2 == op2-1 inherited from previous block */
1299 label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
1300 stmt1 = build2 (MODIFY_EXPR, optype, result,
1301 build2 (BIT_AND_EXPR, optype, op1, tmp2));
1302 bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
1303 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1304 bb2end = stmt1;
1306 label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
1307 stmt1 = build2 (MODIFY_EXPR, optype, result,
1308 build2 (TREE_CODE (operation), optype, op1, op2));
1309 bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
1310 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1311 bb3end = stmt1;
1313 label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
1314 bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
1316 /* Fix CFG. */
1317 /* Edge e23 connects bb2 to bb3, etc. */
1318 e12 = split_block (bb, bb1end);
1319 bb2 = e12->dest;
1320 bb2->count = count;
1321 e23 = split_block (bb2, bb2end);
1322 bb3 = e23->dest;
1323 bb3->count = all - count;
1324 e34 = split_block (bb3, bb3end);
1325 bb4 = e34->dest;
1326 bb4->count = all;
1328 e12->flags &= ~EDGE_FALLTHRU;
1329 e12->flags |= EDGE_FALSE_VALUE;
1330 e12->probability = prob;
1331 e12->count = count;
1333 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
1334 e13->probability = REG_BR_PROB_BASE - prob;
1335 e13->count = all - count;
1337 remove_edge (e23);
1339 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
1340 e24->probability = REG_BR_PROB_BASE;
1341 e24->count = count;
1343 e34->probability = REG_BR_PROB_BASE;
1344 e34->count = all - count;
1346 return result;
1349 /* Do transform 2) on STMT if applicable. */
1350 static bool
1351 tree_mod_pow2_value_transform (tree stmt)
1353 stmt_ann_t ann = get_stmt_ann (stmt);
1354 histogram_value histogram;
1355 enum tree_code code;
1356 gcov_type count, wrong_values, all;
1357 tree modify, op, op1, op2, result, value;
1358 int prob;
1359 unsigned int i;
1361 modify = stmt;
1362 if (TREE_CODE (stmt) == RETURN_EXPR
1363 && TREE_OPERAND (stmt, 0)
1364 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
1365 modify = TREE_OPERAND (stmt, 0);
1366 if (TREE_CODE (modify) != MODIFY_EXPR)
1367 return false;
1368 op = TREE_OPERAND (modify, 1);
1369 if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
1370 return false;
1371 code = TREE_CODE (op);
1373 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
1374 return false;
1376 op1 = TREE_OPERAND (op, 0);
1377 op2 = TREE_OPERAND (op, 1);
1378 if (!ann->histograms)
1379 return false;
1381 for (histogram = ann->histograms; histogram; histogram = histogram->hvalue.tree.next)
1382 if (histogram->type == HIST_TYPE_POW2)
1383 break;
1385 if (!histogram)
1386 return false;
1388 value = histogram->hvalue.tree.value;
1389 wrong_values = histogram->hvalue.tree.counters[0];
1390 count = 0;
1391 for (i = 1; i <= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (stmt))); i++)
1392 count += histogram->hvalue.tree.counters[i];
1394 /* We require that we hit a power of 2 in at least half of all evaluations. */
1395 if (simple_cst_equal (op2, value) != 1 || count < wrong_values)
1396 return false;
1398 if (dump_file)
1400 fprintf (dump_file, "Mod power of 2 transformation on insn ");
1401 print_generic_stmt (dump_file, stmt, TDF_SLIM);
1404 /* Compute probability of taking the optimal path. */
1405 all = count + wrong_values;
1406 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
1408 result = tree_mod_pow2 (stmt, op, op1, op2, prob, count, all);
1410 TREE_OPERAND (modify, 1) = result;
1412 return true;
1415 /* Generate code for transformations 3 and 4 (with OPERATION, operands OP1
1416 and OP2, parent modify-expr STMT, and NCOUNTS the number of cases to
1417 support. Currently only NCOUNTS==0 or 1 is supported and this is
1418 built into this interface. The probabilities of taking the optimal
1419 paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
1420 COUNT2/ALL respectively within roundoff error). This generates the
1421 result into a temp and returns the temp; it does not replace or alter
1422 the original STMT. */
1423 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
1425 static tree
1426 tree_mod_subtract (tree stmt, tree operation, tree op1, tree op2,
1427 int prob1, int prob2, int ncounts,
1428 gcov_type count1, gcov_type count2, gcov_type all)
1430 tree stmt1, stmt2, stmt3;
1431 tree tmp1;
1432 tree label_decl1 = create_artificial_label ();
1433 tree label_decl2 = create_artificial_label ();
1434 tree label_decl3 = create_artificial_label ();
1435 tree label1, label2, label3;
1436 tree bb1end, bb2end = NULL_TREE, bb3end;
1437 basic_block bb, bb2, bb3, bb4;
1438 tree optype = TREE_TYPE (operation);
1439 edge e12, e23 = 0, e24, e34, e14;
1440 block_stmt_iterator bsi;
1441 tree result = create_tmp_var (optype, "PROF");
1443 bb = bb_for_stmt (stmt);
1444 bsi = bsi_for_stmt (stmt);
1446 tmp1 = create_tmp_var (optype, "PROF");
1447 stmt1 = build2 (MODIFY_EXPR, optype, result, op1);
1448 stmt2 = build2 (MODIFY_EXPR, optype, tmp1, op2);
1449 stmt3 = build3 (COND_EXPR, void_type_node,
1450 build2 (LT_EXPR, boolean_type_node, result, tmp1),
1451 build1 (GOTO_EXPR, void_type_node, label_decl3),
1452 build1 (GOTO_EXPR, void_type_node,
1453 ncounts ? label_decl1 : label_decl2));
1454 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1455 bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
1456 bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
1457 bb1end = stmt3;
1459 if (ncounts) /* Assumed to be 0 or 1 */
1461 label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
1462 stmt1 = build2 (MODIFY_EXPR, optype, result,
1463 build2 (MINUS_EXPR, optype, result, tmp1));
1464 stmt2 = build3 (COND_EXPR, void_type_node,
1465 build2 (LT_EXPR, boolean_type_node, result, tmp1),
1466 build1 (GOTO_EXPR, void_type_node, label_decl3),
1467 build1 (GOTO_EXPR, void_type_node, label_decl2));
1468 bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
1469 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1470 bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
1471 bb2end = stmt2;
1474 /* Fallback case. */
1475 label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
1476 stmt1 = build2 (MODIFY_EXPR, optype, result,
1477 build2 (TREE_CODE (operation), optype, result, tmp1));
1478 bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
1479 bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
1480 bb3end = stmt1;
1482 label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
1483 bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
1485 /* Fix CFG. */
1486 /* Edge e23 connects bb2 to bb3, etc. */
1487 /* However block 3 is optional; if it is not there, references
1488 to 3 really refer to block 2. */
1489 e12 = split_block (bb, bb1end);
1490 bb2 = e12->dest;
1491 bb2->count = all - count1;
1493 if (ncounts) /* Assumed to be 0 or 1. */
1495 e23 = split_block (bb2, bb2end);
1496 bb3 = e23->dest;
1497 bb3->count = all - count1 - count2;
1500 e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1501 bb4 = e34->dest;
1502 bb4->count = all;
1504 e12->flags &= ~EDGE_FALLTHRU;
1505 e12->flags |= EDGE_FALSE_VALUE;
1506 e12->probability = REG_BR_PROB_BASE - prob1;
1507 e12->count = count1;
1509 e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1510 e14->probability = prob1;
1511 e14->count = all - count1;
1513 if (ncounts) /* Assumed to be 0 or 1. */
1515 e23->flags &= ~EDGE_FALLTHRU;
1516 e23->flags |= EDGE_FALSE_VALUE;
1517 e23->count = all - count1 - count2;
1518 e23->probability = REG_BR_PROB_BASE - prob2;
1520 e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1521 e24->probability = prob2;
1522 e24->count = count2;
1525 e34->probability = REG_BR_PROB_BASE;
1526 e34->count = all - count1 - count2;
1528 return result;
1531 /* Do transforms 3) and 4) on STMT if applicable. */
1532 static bool
1533 tree_mod_subtract_transform (tree stmt)
1535 stmt_ann_t ann = get_stmt_ann (stmt);
1536 histogram_value histogram;
1537 enum tree_code code;
1538 gcov_type count, wrong_values, all;
1539 tree modify, op, op1, op2, result, value;
1540 int prob1, prob2;
1541 unsigned int i;
1543 modify = stmt;
1544 if (TREE_CODE (stmt) == RETURN_EXPR
1545 && TREE_OPERAND (stmt, 0)
1546 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
1547 modify = TREE_OPERAND (stmt, 0);
1548 if (TREE_CODE (modify) != MODIFY_EXPR)
1549 return false;
1550 op = TREE_OPERAND (modify, 1);
1551 if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
1552 return false;
1553 code = TREE_CODE (op);
1555 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
1556 return false;
1558 op1 = TREE_OPERAND (op, 0);
1559 op2 = TREE_OPERAND (op, 1);
1560 if (!ann->histograms)
1561 return false;
1563 for (histogram = ann->histograms; histogram; histogram = histogram->hvalue.tree.next)
1564 if (histogram->type == HIST_TYPE_INTERVAL)
1565 break;
1567 if (!histogram)
1568 return false;
1570 value = histogram->hvalue.tree.value;
1571 all = 0;
1572 wrong_values = 0;
1573 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1574 all += histogram->hvalue.tree.counters[i];
1576 wrong_values += histogram->hvalue.tree.counters[i];
1577 wrong_values += histogram->hvalue.tree.counters[i+1];
1578 all += wrong_values;
1580 /* Sanity check. */
1581 if (simple_cst_equal (op2, value) != 1)
1582 return false;
1584 /* We require that we use just subtractions in at least 50% of all
1585 evaluations. */
1586 count = 0;
1587 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1589 count += histogram->hvalue.tree.counters[i];
1590 if (count * 2 >= all)
1591 break;
1593 if (i == histogram->hdata.intvl.steps)
1594 return false;
1596 if (dump_file)
1598 fprintf (dump_file, "Mod subtract transformation on insn ");
1599 print_generic_stmt (dump_file, stmt, TDF_SLIM);
1602 /* Compute probability of taking the optimal path(s). */
1603 prob1 = (histogram->hvalue.tree.counters[0] * REG_BR_PROB_BASE + all / 2) / all;
1604 prob2 = (histogram->hvalue.tree.counters[1] * REG_BR_PROB_BASE + all / 2) / all;
1606 /* In practice, "steps" is always 2. This interface reflects this,
1607 and will need to be changed if "steps" can change. */
1608 result = tree_mod_subtract (stmt, op, op1, op2, prob1, prob2, i,
1609 histogram->hvalue.tree.counters[0],
1610 histogram->hvalue.tree.counters[1], all);
1612 TREE_OPERAND (modify, 1) = result;
1614 return true;
1617 /* Connection to the outside world. */
1618 /* Struct for IR-dependent hooks. */
1619 struct value_prof_hooks {
1620 /* Find list of values for which we want to measure histograms. */
1621 void (*find_values_to_profile) (histogram_values *);
1623 /* Identify and exploit properties of values that are hard to analyze
1624 statically. See value-prof.c for more detail. */
1625 bool (*value_profile_transformations) (void);
1628 /* Hooks for RTL-based versions (the only ones that currently work). */
1629 static struct value_prof_hooks rtl_value_prof_hooks =
1631 rtl_find_values_to_profile,
1632 rtl_value_profile_transformations
1635 void
1636 rtl_register_value_prof_hooks (void)
1638 value_prof_hooks = &rtl_value_prof_hooks;
1639 gcc_assert (!ir_type ());
1642 /* Find values inside STMT for which we want to measure histograms for
1643 division/modulo optimization, and add them to VALUES. */
1644 static void
1645 tree_divmod_values_to_profile (tree stmt, histogram_values *values)
1647 tree op, op1, op2;
1648 histogram_value hist;
1650 op = stmt;
1651 if (TREE_CODE (stmt) == RETURN_EXPR
1652 && TREE_OPERAND (stmt, 0)
1653 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
1654 op = TREE_OPERAND (stmt, 0);
1656 if (TREE_CODE (op) != MODIFY_EXPR)
1657 return;
1658 if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
1659 return;
1660 op = TREE_OPERAND (op, 1);
1661 switch (TREE_CODE (op))
1663 case TRUNC_DIV_EXPR:
1664 case TRUNC_MOD_EXPR:
1665 op1 = TREE_OPERAND (op, 0);
1666 op2 = TREE_OPERAND (op, 1);
1668 /* Check for a special case where the divisor is power(s) of 2.
1669 This is more aggressive than the RTL version, under the
1670 assumption that later phases will reduce / or % by power of 2
1671 to something clever most of the time. Signed or unsigned. */
1672 if (TREE_CODE (op2) != INTEGER_CST)
1674 hist = ggc_alloc (sizeof (*hist));
1675 hist->hvalue.tree.value = op2;
1676 hist->hvalue.tree.stmt = stmt;
1677 hist->type = HIST_TYPE_POW2;
1678 hist->hdata.pow2.may_be_other = 1;
1679 VEC_safe_push (histogram_value, *values, hist);
1682 /* Check for the case where the divisor is the same value most
1683 of the time. */
1684 if (TREE_CODE (op2) != INTEGER_CST)
1686 hist = ggc_alloc (sizeof (*hist));
1687 hist->hvalue.tree.value = op2;
1688 hist->hvalue.tree.stmt = stmt;
1689 hist->type = HIST_TYPE_SINGLE_VALUE;
1690 VEC_safe_push (histogram_value, *values, hist);
1693 /* For mod, check whether it is not often a noop (or replaceable by
1694 a few subtractions). */
1695 if (TREE_CODE (op) == TRUNC_MOD_EXPR && TYPE_UNSIGNED (TREE_TYPE (op)))
1697 hist = ggc_alloc (sizeof (*hist));
1698 hist->hvalue.tree.stmt = stmt;
1699 hist->hvalue.tree.value = op2;
1700 hist->type = HIST_TYPE_INTERVAL;
1701 hist->hdata.intvl.int_start = 0;
1702 hist->hdata.intvl.steps = 2;
1703 VEC_safe_push (histogram_value, *values, hist);
1705 return;
1707 default:
1708 return;
1712 /* Find values inside STMT for which we want to measure histograms and add
1713 them to the list VALUES. */
1714 static void
1715 tree_values_to_profile (tree stmt, histogram_values *values)
1717 if (flag_value_profile_transformations)
1718 tree_divmod_values_to_profile (stmt, values);
1721 static void
1722 tree_find_values_to_profile (histogram_values *values)
1724 basic_block bb;
1725 block_stmt_iterator bsi;
1726 tree stmt;
1727 unsigned int i;
1729 *values = VEC_alloc (histogram_value, 0);
1730 FOR_EACH_BB (bb)
1731 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1733 tree stmt = bsi_stmt (bsi);
1734 tree_values_to_profile (stmt, values);
1736 static_values = *values;
1738 for (i = 0; i < VEC_length (histogram_value, *values); i++)
1740 histogram_value hist = VEC_index (histogram_value, *values, i);
1742 switch (hist->type)
1744 case HIST_TYPE_INTERVAL:
1745 if (dump_file)
1747 fprintf (dump_file, "Interval counter for tree ");
1748 print_generic_expr (dump_file, hist->hvalue.tree.stmt,
1749 TDF_SLIM);
1750 fprintf (dump_file, ", range %d -- %d.\n",
1751 hist->hdata.intvl.int_start,
1752 (hist->hdata.intvl.int_start
1753 + hist->hdata.intvl.steps - 1));
1755 hist->n_counters = hist->hdata.intvl.steps + 2;
1756 break;
1758 case HIST_TYPE_POW2:
1759 if (dump_file)
1761 fprintf (dump_file, "Pow2 counter for insn ");
1762 print_generic_expr (dump_file, hist->hvalue.tree.stmt, TDF_SLIM);
1763 fprintf (dump_file, ".\n");
1765 stmt = hist->hvalue.tree.stmt;
1766 hist->n_counters
1767 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (stmt)))
1768 + (hist->hdata.pow2.may_be_other ? 1 : 0);
1769 break;
1771 case HIST_TYPE_SINGLE_VALUE:
1772 if (dump_file)
1774 fprintf (dump_file, "Single value counter for insn ");
1775 print_generic_expr (dump_file, hist->hvalue.tree.stmt, TDF_SLIM);
1776 fprintf (dump_file, ".\n");
1778 hist->n_counters = 3;
1779 break;
1781 case HIST_TYPE_CONST_DELTA:
1782 if (dump_file)
1784 fprintf (dump_file, "Constant delta counter for insn ");
1785 print_generic_expr (dump_file, hist->hvalue.tree.stmt, TDF_SLIM);
1786 fprintf (dump_file, ".\n");
1788 hist->n_counters = 4;
1789 break;
1791 default:
1792 abort ();
1797 static struct value_prof_hooks tree_value_prof_hooks = {
1798 tree_find_values_to_profile,
1799 tree_value_profile_transformations
1802 void
1803 tree_register_value_prof_hooks (void)
1805 value_prof_hooks = &tree_value_prof_hooks;
1806 gcc_assert (ir_type ());
1809 /* IR-independent entry points. */
1810 void
1811 find_values_to_profile (histogram_values *values)
1813 (value_prof_hooks->find_values_to_profile) (values);
1816 bool
1817 value_profile_transformations (void)
1819 bool retval = (value_prof_hooks->value_profile_transformations) ();
1820 VEC_free (histogram_value, static_values);
1821 return retval;