/* Profile counter container type.
   Copyright (C) 2017-2023 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_PROFILE_COUNT_H
#define GCC_PROFILE_COUNT_H
struct function;
struct profile_count;
class sreal;

/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.  */
enum profile_quality {
  /* Uninitialized value.  */
  UNINITIALIZED_PROFILE,

  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and cannot be compared
     inter-procedurally.  Never used by probabilities (they are always
     local).  */
  GUESSED_LOCAL,

  /* Profile was read by feedback and was 0, we used local heuristics to guess
     better.  This is the case of functions not run in profile feedback.
     Never used by probabilities.  */
  GUESSED_GLOBAL0,

  /* Same as GUESSED_GLOBAL0 but global count is adjusted 0.  */
  GUESSED_GLOBAL0_ADJUSTED,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
     with feedback and propagated from that).
     Never used by probabilities.  */
  GUESSED,

  /* Profile was determined by autofdo.  */
  AFDO,

  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  ADJUSTED,

  /* Profile was read from profile feedback or determined by accurate static
     method.  */
  PRECISE
};

extern const char *profile_quality_as_string (enum profile_quality);
extern bool parse_profile_quality (const char *value,
				   profile_quality *quality);

/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))

bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);

/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened.  */
inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  uint64_t tmp;
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c/2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
  if (c == 1)
    {
      *res = (uint64_t) -1;
      return false;
    }
#else
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
#endif
  return slow_safe_scale_64bit (a, b, c, res);
}
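
/* Illustrative example (hypothetical values, not part of this header):
   RDIV rounds to the nearest integer, so RDIV (7, 2) is (7 + 1) / 2 == 4,
   and safe_scale_64bit computes the same rounded scaling while guarding
   against overflow of the intermediate product:

     uint64_t res;
     bool ok = safe_scale_64bit (1000, 3, 7, &res);  // res == 429, ok == true
*/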
/* Data type to hold probabilities.  It implements fixed point arithmetic
   with capping, so a probability is always in the range [0,1]; scaling that
   would require values greater than 1 needs to be represented otherwise.

   In addition to the actual value, the quality of the profile is tracked and
   propagated through all operations.  The special value UNINITIALIZED_PROFILE
   is used for probabilities that have not been determined yet (for example
   because of -fno-guess-branch-probability).

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then propagated
   through the compilation.

   Named probabilities are available:
     - never		(0 probability)
     - guessed_never
     - very_unlikely	(1/2000 probability)
     - unlikely		(1/5 probability)
     - even		(1/2 probability)
     - likely		(4/5 probability)
     - very_likely	(1999/2000 probability)
     - guessed_always
     - always

   Named probabilities except for never/always are assumed to be statically
   guessed and thus not necessarily accurate.  The difference between never
   and guessed_never is that the first one should be used only in case that
   a well behaving program will very likely not execute the "never" path.
   For example if the path leads to an abort () call or to exception handling.

   Always and guessed_always probabilities are symmetric.

   For legacy code we support conversion to/from REG_BR_PROB_BASE based fixpoint
   integer arithmetics.  Once the code is converted to branch probabilities,
   these conversions will probably go away because they are lossy.  */
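
/* Illustrative usage sketch (hypothetical values; only the API declared
   below is assumed):

     profile_probability p = profile_probability::even ();	// 1/2, GUESSED
     profile_probability q = p * profile_probability::likely ();  // ~2/5
     int legacy = q.to_reg_br_prob_base ();			// ~4000

   operator* keeps the minimum of the operands' qualities, capped at
   ADJUSTED, since the result is no longer a directly measured value.  */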
class GTY((user)) profile_probability
{
  static const int n_bits = 29;
  /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
     will lead to harder multiplication sequences.  */
  static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
  static const uint32_t uninitialized_probability
    = ((uint32_t) 1 << (n_bits - 1)) - 1;

  uint32_t m_val : 29;
  enum profile_quality m_quality : 3;

  friend struct profile_count;
public:
  profile_probability (): m_val (uninitialized_probability),
			  m_quality (GUESSED)
  {}

  profile_probability (uint32_t val, profile_quality quality):
    m_val (val), m_quality (quality)
  {}

  /* Named probabilities.  */
  static profile_probability never ()
  {
    profile_probability ret;
    ret.m_val = 0;
    ret.m_quality = PRECISE;
    return ret;
  }

  static profile_probability guessed_never ()
  {
    profile_probability ret;
    ret.m_val = 0;
    ret.m_quality = GUESSED;
    return ret;
  }

  static profile_probability very_unlikely ()
  {
    /* Be consistent with PROB_VERY_UNLIKELY in predict.h.  */
    profile_probability r = guessed_always () / 2000;
    r.m_val--;
    return r;
  }

  static profile_probability unlikely ()
  {
    /* Be consistent with PROB_VERY_LIKELY in predict.h.  */
    profile_probability r = guessed_always () / 5;
    r.m_val--;
    return r;
  }

  static profile_probability even ()
  {
    return guessed_always () / 2;
  }

  static profile_probability very_likely ()
  {
    return always () - very_unlikely ();
  }

  static profile_probability likely ()
  {
    return always () - unlikely ();
  }

  /* Return true when value is not zero and can be used for scaling.  */
  bool nonzero_p () const
  {
    return initialized_p () && m_val != 0;
  }

  static profile_probability guessed_always ()
  {
    profile_probability ret;
    ret.m_val = max_probability;
    ret.m_quality = GUESSED;
    return ret;
  }

  static profile_probability always ()
  {
    profile_probability ret;
    ret.m_val = max_probability;
    ret.m_quality = PRECISE;
    return ret;
  }

  /* Probabilities which have not been initialized.  Either because
     initialization did not happen yet or because the profile is unknown.  */
  static profile_probability uninitialized ()
  {
    profile_probability c;
    c.m_val = uninitialized_probability;
    c.m_quality = GUESSED;
    return c;
  }

  /* Return true if value has been initialized.  */
  bool initialized_p () const
  {
    return m_val != uninitialized_probability;
  }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
  {
    return m_quality >= ADJUSTED;
  }

  /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics.
     This is mostly to support legacy code and should go away.  */
  static profile_probability from_reg_br_prob_base (int v)
  {
    profile_probability ret;
    gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
    ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
    ret.m_quality = GUESSED;
    return ret;
  }

  /* Return THIS with quality set to ADJUSTED.  */
  profile_probability adjusted () const
  {
    profile_probability ret = *this;
    if (!initialized_p ())
      return *this;
    ret.m_quality = ADJUSTED;
    return ret;
  }

  int to_reg_br_prob_base () const
  {
    gcc_checking_assert (initialized_p ());
    return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
  }

  /* Conversion to and from RTL representation of profile probabilities.  */
  static profile_probability from_reg_br_prob_note (int v)
  {
    profile_probability ret;
    ret.m_val = ((unsigned int)v) / 8;
    ret.m_quality = (enum profile_quality)(v & 7);
    return ret;
  }

  int to_reg_br_prob_note () const
  {
    gcc_checking_assert (initialized_p ());
    int ret = m_val * 8 + m_quality;
    gcc_checking_assert (from_reg_br_prob_note (ret) == *this);
    return ret;
  }
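
  /* Illustrative example (hypothetical numbers): the REG_BR_PROB note
     encoding packs the 29-bit value and the 3-bit quality into one int as
     m_val * 8 + m_quality.  For instance a PRECISE (quality 7) probability
     with m_val == 1000 round-trips as 1000 * 8 + 7 == 8007:

       profile_probability p = profile_probability::from_reg_br_prob_note (8007);
       int note = p.to_reg_br_prob_note ();	// note == 8007 again
  */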

  /* Return VAL1/VAL2.  */
  static profile_probability probability_in_gcov_type
				 (gcov_type val1, gcov_type val2)
  {
    profile_probability ret;
    gcc_checking_assert (val1 >= 0 && val2 > 0);
    if (val1 > val2)
      ret.m_val = max_probability;
    else
      {
	uint64_t tmp;
	safe_scale_64bit (val1, max_probability, val2, &tmp);
	gcc_checking_assert (tmp <= max_probability);
	ret.m_val = tmp;
      }
    ret.m_quality = PRECISE;
    return ret;
  }

  /* Basic operations.  */
  bool operator== (const profile_probability &other) const
  {
    return m_val == other.m_val && m_quality == other.m_quality;
  }

  profile_probability operator+ (const profile_probability &other) const
  {
    if (other == never ())
      return *this;
    if (*this == never ())
      return other;
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();

    profile_probability ret;
    ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
    ret.m_quality = MIN (m_quality, other.m_quality);
    return ret;
  }

  profile_probability &operator+= (const profile_probability &other)
  {
    if (other == never ())
      return *this;
    if (*this == never ())
      {
	*this = other;
	return *this;
      }
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
	m_quality = MIN (m_quality, other.m_quality);
      }
    return *this;
  }

  profile_probability operator- (const profile_probability &other) const
  {
    if (*this == never ()
	|| other == never ())
      return *this;
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();
    profile_probability ret;
    ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
    ret.m_quality = MIN (m_quality, other.m_quality);
    return ret;
  }

  profile_probability &operator-= (const profile_probability &other)
  {
    if (*this == never ()
	|| other == never ())
      return *this;
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
	m_quality = MIN (m_quality, other.m_quality);
      }
    return *this;
  }

  profile_probability operator* (const profile_probability &other) const
  {
    if (*this == never ()
	|| other == never ())
      return never ();
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();
    profile_probability ret;
    ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
    ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
    return ret;
  }

  profile_probability &operator*= (const profile_probability &other)
  {
    if (*this == never ()
	|| other == never ())
      return *this = never ();
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
	m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
      }
    return *this;
  }

  profile_probability operator/ (const profile_probability &other) const
  {
    if (*this == never ())
      return never ();
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();
    profile_probability ret;
    /* If we get probability above 1, mark it as unreliable and return 1.  */
    if (m_val >= other.m_val)
      {
	ret.m_val = max_probability;
	ret.m_quality = MIN (MIN (m_quality, other.m_quality),
			     GUESSED);
	return ret;
      }
    else if (!m_val)
      ret.m_val = 0;
    else
      {
	gcc_checking_assert (other.m_val);
	ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
			       other.m_val),
			 max_probability);
      }
    ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
    return ret;
  }

  profile_probability &operator/= (const profile_probability &other)
  {
    if (*this == never ())
      return *this = never ();
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	/* If we get probability above 1, mark it as unreliable
	   and return 1.  */
	if (m_val > other.m_val)
	  {
	    m_val = max_probability;
	    m_quality = MIN (MIN (m_quality, other.m_quality),
			     GUESSED);
	    return *this;
	  }
	else if (!m_val)
	  ;
	else
	  {
	    gcc_checking_assert (other.m_val);
	    m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
			       other.m_val),
			 max_probability);
	  }
	m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
      }
    return *this;
  }

  /* Split *THIS (ORIG) probability into 2 probabilities, such that
     the returned one (FIRST) is *THIS * CPROB and *THIS is
     adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
     == ORIG.  This is useful e.g. when splitting a conditional
     branch like:
     if (cond)
       goto lab; // ORIG probability
     into
     if (cond1)
       goto lab; // FIRST = ORIG * CPROB probability
     if (cond2)
       goto lab; // SECOND probability
     such that the overall probability of jumping to lab remains
     the same.  CPROB gives the relative probability between the
     branches.  */
  profile_probability split (const profile_probability &cprob)
  {
    profile_probability ret = *this * cprob;
    /* The following is equivalent to:
       *this = cprob.invert () * *this / ret.invert ();
       Avoid scaling when overall outcome is supposed to be always.
       Without knowing that one is inverse of other, the result would be
       conservative.  */
    if (!(*this == always ()))
      *this = (*this - ret) / ret.invert ();
    return ret;
  }
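
  /* Illustrative worked example (hypothetical numbers): if ORIG == 40% and
     CPROB == 1/2, split () returns FIRST == 20% and updates *THIS to
     SECOND == (40% - 20%) / 80% == 25%, so that
     FIRST + FIRST.invert () * SECOND == 20% + 80% * 25% == 40% == ORIG.  */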

  gcov_type apply (gcov_type val) const
  {
    if (*this == uninitialized ())
      return val / 2;
    return RDIV (val * m_val, max_probability);
  }

  /* Return 1-*THIS.  */
  profile_probability invert () const
  {
    return always () - *this;
  }

  /* Return THIS with quality dropped to GUESSED.  */
  profile_probability guessed () const
  {
    profile_probability ret = *this;
    ret.m_quality = GUESSED;
    return ret;
  }

  /* Return THIS with quality dropped to AFDO.  */
  profile_probability afdo () const
  {
    profile_probability ret = *this;
    ret.m_quality = AFDO;
    return ret;
  }

  /* Return *THIS * NUM / DEN.  */
  profile_probability apply_scale (int64_t num, int64_t den) const
  {
    if (*this == never ())
      return *this;
    if (!initialized_p ())
      return uninitialized ();
    profile_probability ret;
    uint64_t tmp;
    safe_scale_64bit (m_val, num, den, &tmp);
    ret.m_val = MIN (tmp, max_probability);
    ret.m_quality = MIN (m_quality, ADJUSTED);
    return ret;
  }

  /* Return *THIS * NUM / DEN.  */
  profile_probability apply_scale (profile_probability num,
				   profile_probability den) const
  {
    if (*this == never ())
      return *this;
    if (num == never ())
      return num;
    if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
      return uninitialized ();
    if (num == den)
      return *this;
    gcc_checking_assert (den.m_val);

    profile_probability ret;
    uint64_t val;
    safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
    ret.m_val = MIN (val, max_probability);
    ret.m_quality = MIN (MIN (MIN (m_quality, ADJUSTED),
			      num.m_quality), den.m_quality);
    return ret;
  }

  /* Return true when the probability of edge is reliable.

     The profile guessing code is good at predicting branch outcome (i.e.
     taken/not taken), that is predicted right slightly over 75% of time.
     It is however notoriously poor on predicting the probability itself.
     In general the profile appears a lot flatter (with probabilities closer
     to 50%) than the reality, so it is a bad idea to use it to drive
     optimizations such as those disabling dynamic branch prediction for well
     predictable branches.

     There are two exceptions - edges leading to noreturn edges and edges
     predicted by the number of iterations heuristics are predicted well.
     This function should be able to distinguish those, but at the moment it
     simply checks for the noreturn heuristic, which is the only one giving
     probability over 99% or below 1%.  In the future we might want to
     propagate reliability information across the CFG if we find this
     information useful in multiple places.  */
  bool probably_reliable_p () const
  {
    if (m_quality >= ADJUSTED)
      return true;
    if (!initialized_p ())
      return false;
    return m_val < max_probability / 100
	   || m_val > max_probability - max_probability / 100;
  }

  /* Return false if profile_probability is bogus.  */
  bool verify () const
  {
    gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
    if (m_val == uninitialized_probability)
      return m_quality == GUESSED;
    else if (m_quality < GUESSED)
      return false;
    return m_val <= max_probability;
  }

  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_probability &other) const
  {
    return initialized_p () && other.initialized_p () && m_val < other.m_val;
  }

  bool operator> (const profile_probability &other) const
  {
    return initialized_p () && other.initialized_p () && m_val > other.m_val;
  }

  bool operator<= (const profile_probability &other) const
  {
    return initialized_p () && other.initialized_p () && m_val <= other.m_val;
  }

  bool operator>= (const profile_probability &other) const
  {
    return initialized_p () && other.initialized_p () && m_val >= other.m_val;
  }
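
  /* Because the comparisons above are conservative, a < b is not the
     negation of a >= b: if either operand is uninitialized both return
     false.  Illustrative sketch (hypothetical variables):

       profile_probability a = profile_probability::uninitialized ();
       profile_probability b = profile_probability::even ();
       bool lt = a < b;	  // false - cannot be decided
       bool ge = a >= b;  // also false
  */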

  profile_probability operator* (int64_t num) const
  {
    return apply_scale (num, 1);
  }

  profile_probability operator*= (int64_t num)
  {
    *this = apply_scale (num, 1);
    return *this;
  }

  profile_probability operator/ (int64_t den) const
  {
    return apply_scale (1, den);
  }

  profile_probability operator/= (int64_t den)
  {
    *this = apply_scale (1, den);
    return *this;
  }

  /* Get the value of the count.  */
  uint32_t value () const { return m_val; }

  /* Get the quality of the count.  */
  enum profile_quality quality () const { return m_quality; }

  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Output THIS to BUFFER.  */
  void dump (char *buffer) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_probability other) const;

  /* Return if difference is greater than 50%.  */
  bool differs_lot_from_p (profile_probability other) const;

  /* COUNT1 times event happens with *THIS probability, COUNT2 times event
     happens with OTHER probability.  Return probability that either *THIS
     or OTHER happens.  */
  profile_probability combine_with_count (profile_count count1,
					  profile_probability other,
					  profile_count count2) const;

  /* Return probability as sreal.  */
  sreal to_sreal () const;
  /* LTO streaming support.  */
  static profile_probability stream_in (class lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};

/* Main data type to hold profile counters in GCC.  Profile counts originate
   either from profile feedback, static profile estimation or both.  We do not
   perform whole program profile propagation and thus profile estimation
   counters are often local to a function, while counters from profile feedback
   (or special cases of profile estimation) can be used inter-procedurally.

   There are 3 basic types:
     1) local counters which are the result of intra-procedural static profile
	estimation.
     2) ipa counters which are the result of profile feedback or a special case
	of static profile estimation (such as in function main).
     3) counters which count as 0 inter-procedurally (because the given
	function was never run in train feedback) but hold a local static
	profile estimate.

   Counters of type 1 and 3 cannot be mixed with counters of a different type
   within an operation (because the whole function should use one type of
   counter), with the exception that a global zero mixes in most operations
   where the outcome is well defined.

   To take a local counter and use it inter-procedurally, use the ipa member
   function which strips information irrelevant at the inter-procedural level.

   Counters are 61bit integers representing the number of executions during
   the train run or the normalized frequency within the function.

   As the profile is maintained during the compilation, many adjustments are
   made.  Not all transformations can be made precisely, most importantly
   when code is being duplicated.  It also may happen that part of the CFG has
   profile counts known while others do not - for example when LTO optimizing
   a partly profiled program or when the profile was lost due to COMDAT
   merging.

   For this reason profile_count tracks more information than
   just an unsigned integer and it is also ready for profile mismatches.
   The API of this data type represents operations that are natural
   on profile counts - sum, difference and operations with scales and
   probabilities.  All operations are safe by never getting negative counts
   and they end up in an uninitialized scale if any of the parameters is
   uninitialized.

   All comparisons are three-state and conservative.  Thus
   a < b is not equal to !(a >= b).

   The following pre-defined counts are available:

   profile_count::zero ()  for code that is known to execute zero times at
      runtime (this can be detected statically, i.e. for paths leading to
      abort ());
   profile_count::one ()  for code that is known to execute once (such as
      the main () function);
   profile_count::uninitialized ()  for unknown execution count.  */
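
/* Illustrative usage sketch (hypothetical values; only the API declared
   below is assumed):

     profile_count c = profile_count::from_gcov_type (1000);  // PRECISE
     profile_count half = c.apply_scale (1, 2);		       // 500, ADJUSTED
     profile_probability p = half.probability_in (c);	       // roughly 1/2

   Scaling drops the quality to at most ADJUSTED because the result no
   longer matches the measured execution counts exactly.  */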

struct GTY(()) profile_count
{
public:
  /* Use 62bit to hold basic block counters.  Should be at least
     64bit.  Although a counter cannot be negative, we use a signed
     type to hold various extra stages.  */

  static const int n_bits = 61;
  static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
private:
  static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;

#if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
  /* Work-around for PR88469.  A bug in the gcc-6/7/8 PCS layout code
     incorrectly detects the alignment of a structure where the only
     64-bit aligned object is a bit-field.  We force the alignment of
     the entire field to mitigate this.  */
#define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
#else
#define UINT64_BIT_FIELD_ALIGN
#endif
  uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
#undef UINT64_BIT_FIELD_ALIGN
  enum profile_quality m_quality : 3;
public:

  /* Return true if both values can meaningfully appear in a single function
     body.  We have either all counters in function local or global, otherwise
     operations between them are not really defined well.  */
  bool compatible_p (const profile_count other) const
  {
    if (!initialized_p () || !other.initialized_p ())
      return true;
    if (*this == zero ()
	|| other == zero ())
      return true;
    /* Do not allow nonzero global profile together with local guesses
       that are globally0.  */
    if (ipa ().nonzero_p ()
	&& !(other.ipa () == other))
      return false;
    if (other.ipa ().nonzero_p ()
	&& !(ipa () == *this))
      return false;

    return ipa_p () == other.ipa_p ();
  }

  /* Used for counters which are expected to be never executed.  */
  static profile_count zero ()
  {
    return from_gcov_type (0);
  }

  static profile_count adjusted_zero ()
  {
    profile_count c;
    c.m_val = 0;
    c.m_quality = ADJUSTED;
    return c;
  }

  static profile_count guessed_zero ()
  {
    profile_count c;
    c.m_val = 0;
    c.m_quality = GUESSED;
    return c;
  }

  static profile_count one ()
  {
    return from_gcov_type (1);
  }

  /* Value of counters which have not been initialized.  Either because
     initialization did not happen yet or because the profile is unknown.  */
  static profile_count uninitialized ()
  {
    profile_count c;
    c.m_val = uninitialized_count;
    c.m_quality = GUESSED_LOCAL;
    return c;
  }

  /* Conversion to gcov_type is lossy.  */
  gcov_type to_gcov_type () const
  {
    gcc_checking_assert (initialized_p ());
    return m_val;
  }

  /* Return true if value has been initialized.  */
  bool initialized_p () const
  {
    return m_val != uninitialized_count;
  }

  /* Return true if value can be trusted.  */
  bool reliable_p () const
  {
    return m_quality >= ADJUSTED;
  }

  /* Return true if value can be operated inter-procedurally.  */
  bool ipa_p () const
  {
    return !initialized_p () || m_quality >= GUESSED_GLOBAL0;
  }

  /* Return true if quality of profile is precise.  */
  bool precise_p () const
  {
    return m_quality == PRECISE;
  }

  /* Get the value of the count.  */
  uint64_t value () const { return m_val; }

  /* Get the quality of the count.  */
  enum profile_quality quality () const { return m_quality; }

  /* When merging basic blocks, the two different profile counts are unified.
     Return true if this can be done without losing info about profile.
     The only case we care about here is when first BB contains something
     that makes it terminate in a way not visible in CFG.  */
  bool ok_for_merging (profile_count other) const
  {
    if (m_quality < ADJUSTED
	|| other.m_quality < ADJUSTED)
      return true;
    return !(other < *this);
  }

  /* When merging two BBs with different counts, pick common count that looks
     most representative.  */
  profile_count merge (profile_count other) const
  {
    if (*this == other || !other.initialized_p ()
	|| m_quality > other.m_quality)
      return *this;
    if (other.m_quality > m_quality
	|| other > *this)
      return other;
    return *this;
  }

  /* Basic operations.  */
  bool operator== (const profile_count &other) const
  {
    return m_val == other.m_val && m_quality == other.m_quality;
  }

  profile_count operator+ (const profile_count &other) const
  {
    if (other == zero ())
      return *this;
    if (*this == zero ())
      return other;
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();

    profile_count ret;
    gcc_checking_assert (compatible_p (other));
    ret.m_val = m_val + other.m_val;
    ret.m_quality = MIN (m_quality, other.m_quality);
    return ret;
  }

  profile_count &operator+= (const profile_count &other)
  {
    if (other == zero ())
      return *this;
    if (*this == zero ())
      {
	*this = other;
	return *this;
      }
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	gcc_checking_assert (compatible_p (other));
	m_val += other.m_val;
	m_quality = MIN (m_quality, other.m_quality);
      }
    return *this;
  }

  profile_count operator- (const profile_count &other) const
  {
    if (*this == zero () || other == zero ())
      return *this;
    if (!initialized_p () || !other.initialized_p ())
      return uninitialized ();
    gcc_checking_assert (compatible_p (other));
    profile_count ret;
    ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
    ret.m_quality = MIN (m_quality, other.m_quality);
    return ret;
  }

  profile_count &operator-= (const profile_count &other)
  {
    if (*this == zero () || other == zero ())
      return *this;
    if (!initialized_p () || !other.initialized_p ())
      return *this = uninitialized ();
    else
      {
	gcc_checking_assert (compatible_p (other));
	m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
	m_quality = MIN (m_quality, other.m_quality);
      }
    return *this;
  }

  /* Return false if profile_count is bogus.  */
  bool verify () const
  {
    gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
    return m_val != uninitialized_count || m_quality == GUESSED_LOCAL;
  }

  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
  bool operator< (const profile_count &other) const
  {
    if (!initialized_p () || !other.initialized_p ())
      return false;
    if (*this == zero ())
      return !(other == zero ());
    if (other == zero ())
      return false;
    gcc_checking_assert (compatible_p (other));
    return m_val < other.m_val;
  }

  bool operator> (const profile_count &other) const
  {
    if (!initialized_p () || !other.initialized_p ())
      return false;
    if (*this == zero ())
      return false;
    if (other == zero ())
      return !(*this == zero ());
    gcc_checking_assert (compatible_p (other));
    return initialized_p () && other.initialized_p () && m_val > other.m_val;
  }

  bool operator< (const gcov_type other) const
  {
    gcc_checking_assert (ipa_p ());
    gcc_checking_assert (other >= 0);
    return ipa ().initialized_p () && ipa ().m_val < (uint64_t) other;
  }

  bool operator> (const gcov_type other) const
  {
    gcc_checking_assert (ipa_p ());
    gcc_checking_assert (other >= 0);
    return ipa ().initialized_p () && ipa ().m_val > (uint64_t) other;
  }

  bool operator<= (const profile_count &other) const
  {
    if (!initialized_p () || !other.initialized_p ())
      return false;
    if (*this == zero ())
      return true;
    if (other == zero ())
      return (*this == zero ());
    gcc_checking_assert (compatible_p (other));
    return m_val <= other.m_val;
  }

  bool operator>= (const profile_count &other) const
  {
    if (!initialized_p () || !other.initialized_p ())
      return false;
    if (other == zero ())
      return true;
    if (*this == zero ())
      return (other == zero ());
    gcc_checking_assert (compatible_p (other));
    return m_val >= other.m_val;
  }

  bool operator<= (const gcov_type other) const
  {
    gcc_checking_assert (ipa_p ());
    gcc_checking_assert (other >= 0);
    return ipa ().initialized_p () && ipa ().m_val <= (uint64_t) other;
  }

  bool operator>= (const gcov_type other) const
  {
    gcc_checking_assert (ipa_p ());
    gcc_checking_assert (other >= 0);
    return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
  }

  profile_count operator* (int64_t num) const
  {
    return apply_scale (num, 1);
  }

  profile_count operator*= (int64_t num)
  {
    *this = apply_scale (num, 1);
    return *this;
  }

  profile_count operator/ (int64_t den) const
  {
    return apply_scale (1, den);
  }

  profile_count operator/= (int64_t den)
  {
    *this = apply_scale (1, den);
    return *this;
  }

  /* Return true when value is not zero and can be used for scaling.
     This is different from *this > 0 because that requires counter to
     be IPA.  */
  bool nonzero_p () const
  {
    return initialized_p () && m_val != 0;
  }

  /* Make counter forcibly nonzero.  */
  profile_count force_nonzero () const
  {
    if (!initialized_p ())
      return *this;
    profile_count ret = *this;
    if (ret.m_val == 0)
      {
	ret.m_val = 1;
	ret.m_quality = MIN (m_quality, ADJUSTED);
      }
    return ret;
  }

  profile_count max (profile_count other) const
  {
    profile_count val = *this;

    /* Always prefer nonzero IPA counts over local counts.  */
    if (ipa ().nonzero_p () || other.ipa ().nonzero_p ())
      {
	val = ipa ();
	other = other.ipa ();
      }
    if (!initialized_p ())
      return other;
    if (!other.initialized_p ())
      return *this;
    if (*this == zero ())
      return other;
    if (other == zero ())
      return *this;
    gcc_checking_assert (compatible_p (other));
    if (val.m_val < other.m_val || (m_val == other.m_val
				    && val.m_quality < other.m_quality))
      return other;
    return *this;
  }

  /* PROB is a probability in scale 0...REG_BR_PROB_BASE.  Scale counter
     accordingly.  */
  profile_count apply_probability (int prob) const
  {
    gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
    if (m_val == 0)
      return *this;
    if (!initialized_p ())
      return uninitialized ();
    profile_count ret;
    ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
    ret.m_quality = MIN (m_quality, ADJUSTED);
    return ret;
  }

  /* Scale counter according to PROB.  */
  profile_count apply_probability (profile_probability prob) const
  {
    if (*this == zero ())
      return *this;
    if (prob == profile_probability::never ())
      return zero ();
    if (!initialized_p ())
      return uninitialized ();
    profile_count ret;
    uint64_t tmp;
    safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
		      &tmp);
    ret.m_val = tmp;
    ret.m_quality = MIN (m_quality, prob.m_quality);
    return ret;
  }

  /* Return *THIS * NUM / DEN.  */
  profile_count apply_scale (int64_t num, int64_t den) const
  {
    if (m_val == 0)
      return *this;
    if (!initialized_p ())
      return uninitialized ();
    profile_count ret;
    uint64_t tmp;

    gcc_checking_assert (num >= 0 && den > 0);
    safe_scale_64bit (m_val, num, den, &tmp);
    ret.m_val = MIN (tmp, max_count);
    ret.m_quality = MIN (m_quality, ADJUSTED);
    return ret;
  }

  profile_count apply_scale (profile_count num, profile_count den) const
  {
    if (*this == zero ())
      return *this;
    if (num == zero ())
      return num;
    if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
      return uninitialized ();
    if (num == den)
      return *this;
    gcc_checking_assert (den.m_val);

    profile_count ret;
    uint64_t val;
    safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
    ret.m_val = MIN (val, max_count);
    ret.m_quality = MIN (MIN (MIN (m_quality, ADJUSTED),
			      num.m_quality), den.m_quality);
    /* Be sure that ret is not local if num is global.
       Also ensure that ret is not global0 when num is global.  */
    if (num.ipa_p ())
      ret.m_quality = MAX (ret.m_quality,
			   num == num.ipa () ? GUESSED : num.m_quality);
    return ret;
  }

  /* Return THIS with quality dropped to GUESSED_LOCAL.  */
  profile_count guessed_local () const
  {
    profile_count ret = *this;
    if (!initialized_p ())
      return *this;
    ret.m_quality = GUESSED_LOCAL;
    return ret;
  }

  /* We know that profile is globally 0 but keep local profile if present.  */
  profile_count global0 () const
  {
    profile_count ret = *this;
    if (!initialized_p ())
      return *this;
    ret.m_quality = GUESSED_GLOBAL0;
    return ret;
  }

  /* We know that profile is globally adjusted 0 but keep local profile
     if present.  */
  profile_count global0adjusted () const
  {
    profile_count ret = *this;
    if (!initialized_p ())
      return *this;
    ret.m_quality = GUESSED_GLOBAL0_ADJUSTED;
    return ret;
  }

  /* Return THIS with quality dropped to GUESSED.  */
  profile_count guessed () const
  {
    profile_count ret = *this;
    ret.m_quality = MIN (ret.m_quality, GUESSED);
    return ret;
  }

  /* Return variant of profile count which is always safe to compare
     across functions.  */
  profile_count ipa () const
  {
    if (m_quality > GUESSED_GLOBAL0_ADJUSTED)
      return *this;
    if (m_quality == GUESSED_GLOBAL0)
      return zero ();
    if (m_quality == GUESSED_GLOBAL0_ADJUSTED)
      return adjusted_zero ();
    return uninitialized ();
  }

  /* Return THIS with quality dropped to AFDO.  */
  profile_count afdo () const
  {
    profile_count ret = *this;
    ret.m_quality = AFDO;
    return ret;
  }

  /* Return probability of event with counter THIS within event with counter
     OVERALL.  */
  profile_probability probability_in (const profile_count overall) const
  {
    if (*this == zero ()
	&& !(overall == zero ()))
      return profile_probability::never ();
    if (!initialized_p () || !overall.initialized_p ()
	|| !overall.m_val)
      return profile_probability::uninitialized ();
    if (*this == overall && m_quality == PRECISE)
      return profile_probability::always ();
    profile_probability ret;
    gcc_checking_assert (compatible_p (overall));

    if (overall.m_val < m_val)
      {
	ret.m_val = profile_probability::max_probability;
	ret.m_quality = GUESSED;
	return ret;
      }
    else
      ret.m_val = RDIV (m_val * profile_probability::max_probability,
			overall.m_val);
    ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
			      GUESSED), ADJUSTED);
    return ret;
  }
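
  /* Illustrative example (hypothetical counts): for a block executed 300
     times inside a region executed 1000 times,

       profile_count bb = profile_count::from_gcov_type (300);
       profile_count region = profile_count::from_gcov_type (1000);
       profile_probability p = bb.probability_in (region);  // roughly 30%

     the resulting quality is clamped between GUESSED and ADJUSTED, since
     the division is no longer an exactly measured value.  */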

  /* Return true if profile count is very large, so we risk overflows
     with loop transformations.  */
  bool
  very_large_p ()
  {
    if (!initialized_p ())
      return false;
    return m_val > max_count / 65536;
  }

  int to_frequency (struct function *fun) const;
  int to_cgraph_frequency (profile_count entry_bb_count) const;
  sreal to_sreal_scale (profile_count in, bool *known = NULL) const;

  /* Output THIS to F.  */
  void dump (FILE *f, struct function *fun = NULL) const;

  /* Output THIS to BUFFER.  */
  void dump (char *buffer, struct function *fun = NULL) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_count other) const;

  /* We want to scale profile across function boundary from NUM to DEN.
     Take care of the side case when NUM and DEN are zeros of incompatible
     kinds.  */
  static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);

  /* THIS is a count of bb which is known to be executed IPA times.
     Combine this information into bb counter.  This means returning IPA
     if it is nonzero, not changing anything if IPA is uninitialized
     and if IPA is zero, turning THIS into corresponding local profile with
     global0.  */
  profile_count combine_with_ipa_count (profile_count ipa);
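
  /* Illustrative sketch of the intended use (hypothetical variables): when
     an IPA count becomes known for a block whose count so far was only a
     local estimate,

       bb_count = bb_count.combine_with_ipa_count (ipa_count);

     keeps the nonzero IPA count, leaves bb_count untouched when ipa_count
     is uninitialized, and turns bb_count into a global0 local profile when
     ipa_count is zero.  */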

  /* Same as combine_with_ipa_count but inside function with count IPA2.  */
  profile_count combine_with_ipa_count_within
		 (profile_count ipa, profile_count ipa2);

  /* The profiling runtime uses gcov_type, which is usually 64bit integer.
     Conversions back and forth are used to read the coverage and get it
     into internal representation.  */
  static profile_count from_gcov_type (gcov_type v,
				       profile_quality quality = PRECISE);

  /* LTO streaming support.  */
  static profile_count stream_in (class lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};

#endif